[ 463.021261] env[63489]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63489) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 463.021627] env[63489]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63489) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 463.021754] env[63489]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63489) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 463.022028] env[63489]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 463.119215] env[63489]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63489) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 463.129096] env[63489]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63489) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 463.732098] env[63489]: INFO nova.virt.driver [None req-fe50f9bb-d929-4651-8a36-0d9621be7203 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 463.803060] env[63489]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 463.803164] env[63489]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 463.803618] env[63489]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63489) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 466.907359] env[63489]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-ad706279-c40c-4b20-82ae-0c3e3cff1405 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.923130] env[63489]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63489) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 466.923351] env[63489]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-625f97ad-7a33-4197-9bff-9db2cd84f338 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.960833] env[63489]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e4144.
[ 466.961060] env[63489]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.158s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 466.961577] env[63489]: INFO nova.virt.vmwareapi.driver [None req-fe50f9bb-d929-4651-8a36-0d9621be7203 None None] VMware vCenter version: 7.0.3
[ 466.965066] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b18cdac-c16a-42ee-acd5-8130ea489304 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.982980] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2e31fb-f871-4256-a802-7268497d5b93 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.989009] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a1b413-854b-4a33-bc8a-d0bd493f845a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 466.995518] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac840bd-a92c-4460-a6bf-13a1f6955c84 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.008351] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff5d11f-c9bd-476d-8743-a4ba22003061 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.014040] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b018fe04-9b95-4506-a7ba-aa7d05067be0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.043625] env[63489]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-1ec20917-f68a-49dd-9848-d6aade4cee65 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 467.048857] env[63489]: DEBUG nova.virt.vmwareapi.driver [None req-fe50f9bb-d929-4651-8a36-0d9621be7203 None None] Extension org.openstack.compute already exists. {{(pid=63489) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 467.051673] env[63489]: INFO nova.compute.provider_config [None req-fe50f9bb-d929-4651-8a36-0d9621be7203 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 467.554704] env[63489]: DEBUG nova.context [None req-fe50f9bb-d929-4651-8a36-0d9621be7203 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b90c3b2e-3a7b-4b27-a914-ccc58421c7a5(cell1) {{(pid=63489) load_cells /opt/stack/nova/nova/context.py:464}}
[ 467.556776] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 467.557063] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 467.557745] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 467.558197] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Acquiring lock "b90c3b2e-3a7b-4b27-a914-ccc58421c7a5" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 467.558387] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Lock "b90c3b2e-3a7b-4b27-a914-ccc58421c7a5" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 467.559423] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Lock "b90c3b2e-3a7b-4b27-a914-ccc58421c7a5" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 467.579468] env[63489]: INFO dbcounter [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Registered counter for database nova_cell0
[ 467.587570] env[63489]: INFO dbcounter [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Registered counter for database nova_cell1
[ 467.590860] env[63489]: DEBUG oslo_db.sqlalchemy.engines [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63489) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 467.591446] env[63489]: DEBUG oslo_db.sqlalchemy.engines [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63489) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 467.596040] env[63489]: ERROR nova.db.main.api [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 467.596040] env[63489]: result = function(*args, **kwargs)
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 467.596040] env[63489]: return func(*args, **kwargs)
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 467.596040] env[63489]: result = fn(*args, **kwargs)
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 467.596040] env[63489]: return f(*args, **kwargs)
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 467.596040] env[63489]: return db.service_get_minimum_version(context, binaries)
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 467.596040] env[63489]: _check_db_access()
[ 467.596040] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 467.596040] env[63489]: stacktrace = ''.join(traceback.format_stack())
[ 467.596040] env[63489]:
[ 467.596828] env[63489]: ERROR nova.db.main.api [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 467.596828] env[63489]: result = function(*args, **kwargs)
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 467.596828] env[63489]: return func(*args, **kwargs)
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 467.596828] env[63489]: result = fn(*args, **kwargs)
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 467.596828] env[63489]: return f(*args, **kwargs)
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 467.596828] env[63489]: return db.service_get_minimum_version(context, binaries)
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 467.596828] env[63489]: _check_db_access()
[ 467.596828] env[63489]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 467.596828] env[63489]: stacktrace = ''.join(traceback.format_stack())
[ 467.596828] env[63489]:
[ 467.597278] env[63489]: WARNING nova.objects.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 467.597356] env[63489]: WARNING nova.objects.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Failed to get minimum service version for cell b90c3b2e-3a7b-4b27-a914-ccc58421c7a5
[ 467.597775] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Acquiring lock "singleton_lock" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 467.597965] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Acquired lock "singleton_lock" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
467.598248] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Releasing lock "singleton_lock" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 467.598564] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Full set of CONF: {{(pid=63489) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 467.598709] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ******************************************************************************** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 467.598840] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Configuration options gathered from: {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 467.598988] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 467.599204] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 467.599334] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ================================================================================ {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 467.599550] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] allow_resize_to_same_host = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.599722] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] arq_binding_timeout = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.599856] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] backdoor_port = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.599985] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] backdoor_socket = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.600166] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] block_device_allocate_retries = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.600328] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] block_device_allocate_retries_interval = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.600498] env[63489]: DEBUG 
oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cert = self.pem {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.600668] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.600839] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute_monitors = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601021] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] config_dir = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601197] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] config_drive_format = iso9660 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601336] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601502] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] config_source = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601670] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] console_host = devstack {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601837] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] control_exchange = nova {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.601998] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cpu_allocation_ratio = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602177] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] daemon = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602347] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] debug = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602505] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_access_ip_network_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602673] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_availability_zone = nova {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602831] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_ephemeral_format = 
None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.602992] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_green_pool_size = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.603244] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.603412] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] default_schedule_zone = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.603574] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] disk_allocation_ratio = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.603739] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] enable_new_services = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.603918] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] enabled_apis = ['osapi_compute'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604099] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] enabled_ssl_apis = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604265] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] flat_injected = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604427] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] force_config_drive = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604588] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] force_raw_images = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604756] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] graceful_shutdown_timeout = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.604920] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] heal_instance_info_cache_interval = 60 {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.605150] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] host = cpu-1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.605332] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.605500] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] initial_disk_allocation_ratio = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.605667] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] initial_ram_allocation_ratio = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.605893] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606083] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_build_timeout = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606253] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_delete_interval = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606422] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_format = [instance: %(uuid)s] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606592] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_name_template = instance-%08x {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606759] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_usage_audit = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.606959] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_usage_audit_period = month {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.607167] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.607341] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] instances_path = /opt/stack/data/nova/instances {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.607509] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] internal_service_availability_zone = internal {{(pid=63489) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.607667] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] key = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.607831] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] live_migration_retry_count = 30 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608049] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_color = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608236] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_config_append = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608409] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608576] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_dir = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608737] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.608868] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_options = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609042] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_rotate_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609223] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_rotate_interval_type = days {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609391] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] log_rotation_type = none {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609523] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609651] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.609819] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610030] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610188] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610358] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] long_rpc_timeout = 1800 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610520] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_concurrent_builds = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610681] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_concurrent_live_migrations = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.610842] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_concurrent_snapshots = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611009] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_local_block_devices = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611184] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_logfile_count = 30 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611346] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] max_logfile_size_mb = 200 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611506] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] maximum_instance_delete_attempts = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611675] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metadata_listen = 0.0.0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.611842] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metadata_listen_port = 8775 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612014] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metadata_workers = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612182] env[63489]: DEBUG oslo_service.service 
[None req-e910c992-230d-48d1-a34e-fd66f048243a None None] migrate_max_retries = -1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612354] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] mkisofs_cmd = genisoimage {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612562] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] my_block_storage_ip = 10.180.1.21 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612698] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] my_ip = 10.180.1.21 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.612863] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] network_allocate_retries = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613053] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613231] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] osapi_compute_listen = 0.0.0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613395] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] osapi_compute_listen_port = 8774 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613563] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] osapi_compute_unique_server_name_scope = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613732] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] osapi_compute_workers = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.613898] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] password_length = 12 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614070] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] periodic_enable = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614233] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] periodic_fuzzy_delay = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614400] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] pointer_model = usbtablet {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614571] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] preallocate_images = none {{(pid=63489) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614727] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] publish_errors = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.614856] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] pybasedir = /opt/stack/nova {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615018] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ram_allocation_ratio = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615182] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rate_limit_burst = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615350] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rate_limit_except_level = CRITICAL {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615509] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rate_limit_interval = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615666] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reboot_timeout = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615823] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reclaim_instance_interval = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.615979] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] record = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616157] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reimage_timeout_per_gb = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616322] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] report_interval = 120 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616480] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rescue_timeout = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616638] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reserved_host_cpus = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616796] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reserved_host_disk_mb = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.616983] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a 
None None] reserved_host_memory_mb = 512 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.617166] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] reserved_huge_pages = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.617331] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] resize_confirm_window = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.617493] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] resize_fs_using_block_device = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.617651] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] resume_guests_state_on_host_boot = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.617820] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618047] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] rpc_response_timeout = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618240] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] run_external_periodic_tasks = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618416] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] running_deleted_instance_action = reap {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618580] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] running_deleted_instance_poll_interval = 1800 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618741] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] running_deleted_instance_timeout = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.618901] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler_instance_sync_interval = 120 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619100] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_down_time = 720 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619285] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] servicegroup_driver = db {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619438] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] shell_completion = None {{(pid=63489) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619597] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] shelved_offload_time = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619756] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] shelved_poll_interval = 3600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.619922] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] shutdown_timeout = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.620101] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] source_is_ipv6 = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.620262] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ssl_only = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.620508] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.620676] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] sync_power_state_interval = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.620839] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] sync_power_state_pool_size = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621014] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] syslog_log_facility = LOG_USER {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621185] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] tempdir = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621345] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] timeout_nbd = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621510] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] transport_url = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621669] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] update_resources_interval = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621827] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_cow_images = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.621990] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_eventlog = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.622208] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_journal = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.622375] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_json = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.622534] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_rootwrap_daemon = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.622692] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_stderr = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.622850] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] use_syslog = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623011] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vcpu_pin_set = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623191] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plugging_is_fatal = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623365] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plugging_timeout = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623525] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] virt_mkfs = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623686] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] volume_usage_poll_interval = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.623846] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] watch_log_file = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.624025] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] web = /usr/share/spice-html5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 467.624213] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_concurrency.disable_process_locking = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.624751] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.624951] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.625143] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.625334] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.625514] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.625684] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.625870] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.auth_strategy = keystone {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626054] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.compute_link_prefix = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626243] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626422] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.dhcp_domain = novalocal {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626594] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.enable_instance_password = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626762] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.glance_link_prefix = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.626955] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.627161] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.627333] env[63489]: 
DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.instance_list_per_project_cells = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.627499] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.list_records_by_skipping_down_cells = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.627665] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.local_metadata_per_cell = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.627838] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.max_limit = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628053] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.metadata_cache_expiration = 15 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628246] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.neutron_default_tenant_id = default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628423] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.response_validation = warn {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628596] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.use_neutron_default_nets = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628766] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.628932] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.629119] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.629306] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.629477] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_dynamic_targets = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.629642] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_jsonfile_path = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.629825] env[63489]: 
DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630031] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.backend = dogpile.cache.memcached {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630212] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.backend_argument = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630387] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.config_prefix = cache.oslo {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630557] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.dead_timeout = 60.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630726] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.debug_cache_backend = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.630892] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.enable_retry_client = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631070] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.enable_socket_keepalive = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631249] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.enabled = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631419] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.enforce_fips_mode = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631586] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.expiration_time = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631750] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.hashclient_retry_attempts = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.631917] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.hashclient_retry_delay = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632096] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_dead_retry = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632263] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_password = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632427] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632591] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632757] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_pool_maxsize = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.632920] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633097] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_sasl_enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633279] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633446] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_socket_timeout = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633608] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.memcache_username = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633773] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.proxies = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.633937] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_db = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.634148] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_password = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.634343] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_sentinel_service_name = mymaster {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.634524] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.634697] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_server = localhost:6379 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.634888] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_socket_timeout = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635072] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.redis_username = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635242] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.retry_attempts = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635425] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.retry_delay = 0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635587] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.socket_keepalive_count = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635746] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.socket_keepalive_idle = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.635909] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.socket_keepalive_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636086] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.tls_allowed_ciphers = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636252] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.tls_cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636411] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.tls_certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636574] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.tls_enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636734] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cache.tls_keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.636925] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637126] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.auth_type = password {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637301] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637481] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.catalog_info = volumev3::publicURL {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637645] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637811] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.637978] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.cross_az_attach = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638157] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.debug = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638319] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.endpoint_template = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638485] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.http_retries = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638648] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638807] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.638980] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.os_region_name = RegionOne {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639162] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639325] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cinder.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639499] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639662] env[63489]: DEBUG 
oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.cpu_dedicated_set = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639821] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.cpu_shared_set = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.639988] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.image_type_exclude_list = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.640177] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.640344] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.max_concurrent_disk_ops = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.640508] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.max_disk_devices_to_attach = -1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.640671] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.640841] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641019] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.resource_provider_association_refresh = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641186] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641350] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.shutdown_retry_interval = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641533] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641713] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] conductor.workers = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.641895] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] console.allowed_origins = [] {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642071] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] console.ssl_ciphers = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642247] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] console.ssl_minimum_version = default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642418] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] consoleauth.enforce_session_timeout = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642592] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] consoleauth.token_ttl = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642769] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.642930] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643110] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643274] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643438] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643598] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643761] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.643920] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644135] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644254] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644412] env[63489]: DEBUG 
oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644572] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644730] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.644940] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.service_type = accelerator {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645100] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645266] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645426] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645585] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645767] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.645931] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] cyborg.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.646129] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.backend = sqlalchemy {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.646302] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.connection = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.646470] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.connection_debug = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.646642] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.connection_parameters = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.646810] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] 
database.connection_recycle_time = 3600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647020] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.connection_trace = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647206] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.db_inc_retry_interval = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647376] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.db_max_retries = 20 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647542] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.db_max_retry_interval = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647707] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.db_retry_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.647874] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.max_overflow = 50 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648101] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.max_pool_size = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648290] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.max_retries = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648464] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648626] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.mysql_wsrep_sync_wait = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648786] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.pool_timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.648954] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.retry_interval = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.649128] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.slave_connection = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.649296] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.sqlite_synchronous = True {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.649459] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] database.use_db_reconnect = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.649679] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.backend = sqlalchemy {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.649888] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.connection = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650085] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.connection_debug = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650267] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.connection_parameters = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650435] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.connection_recycle_time = 3600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650601] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.connection_trace = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650765] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.db_inc_retry_interval = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.650932] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.db_max_retries = 20 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651109] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.db_max_retry_interval = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651276] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.db_retry_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651440] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.max_overflow = 50 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651602] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.max_pool_size = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651765] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.max_retries = 10 {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.651934] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652109] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652274] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.pool_timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652439] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.retry_interval = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652600] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.slave_connection = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652760] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] api_database.sqlite_synchronous = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.652937] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] devices.enabled_mdev_types = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653131] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653308] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ephemeral_storage_encryption.default_format = luks {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653473] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ephemeral_storage_encryption.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653641] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653816] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.api_servers = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.653982] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.654165] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.certfile = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.654335] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.654497] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.654660] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.654841] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.debug = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655045] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.default_trusted_certificate_ids = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655205] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.enable_certificate_validation = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655372] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.enable_rbd_download = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655533] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655700] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.655861] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656029] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656196] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656362] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.num_retries = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656533] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.rbd_ceph_conf = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656701] env[63489]: DEBUG 
oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.rbd_connect_timeout = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.656870] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.rbd_pool = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657108] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.rbd_user = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657291] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657458] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657621] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657793] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.service_type = image {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.657961] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658137] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658301] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658460] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658653] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658821] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.verify_glance_signatures = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.658982] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] glance.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.659171] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] guestfs.debug = False 
{{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.659343] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] mks.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.659702] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.659900] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.manager_interval = 2400 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.660135] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.precache_concurrency = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.660319] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.remove_unused_base_images = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.660495] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.660666] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.660844] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] image_cache.subdirectory_name = _base {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661037] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.api_max_retries = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661214] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.api_retry_interval = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661379] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661545] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.auth_type = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661707] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.661867] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.certfile = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662043] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662216] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.conductor_group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662377] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662537] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662697] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.662860] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663063] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663248] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663411] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663579] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.peer_list = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663741] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.663903] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664085] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.serial_console_state_timeout = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664248] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664420] env[63489]: DEBUG 
oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.service_type = baremetal {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664582] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.shard = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664750] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.664963] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.665211] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.665408] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.665590] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.665755] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ironic.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.665939] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.666195] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] key_manager.fixed_key = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.666400] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.666569] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.barbican_api_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.666733] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.barbican_endpoint = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.666924] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.barbican_endpoint_type = public {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667120] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.barbican_region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667290] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667451] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667616] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667779] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.667940] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668117] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.number_of_retries = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668284] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.retry_delay = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668449] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.send_service_user_token = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668613] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668774] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.668936] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.verify_ssl = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.669178] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican.verify_ssl_path = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.669398] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.669574] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.auth_type = None {{(pid=63489) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.669740] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.669903] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670087] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670255] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670417] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670581] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670742] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] barbican_service_user.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.670913] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.approle_role_id = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671088] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.approle_secret_id = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671264] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.kv_mountpoint = secret {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671425] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.kv_path = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671592] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.kv_version = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671755] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.namespace = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.671915] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.root_token_id = **** {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.672125] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.ssl_ca_crt_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.672312] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.timeout = 60.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.672479] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.use_ssl = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.672652] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.672834] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673012] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.auth_type = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673183] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673345] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673509] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673671] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673834] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.673992] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.674171] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.674332] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.674493] env[63489]: DEBUG oslo_service.service 
[None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.674650] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.674828] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675030] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675243] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675394] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.service_type = identity {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675560] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675720] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.675881] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676052] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676241] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676402] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] keystone.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676605] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.connection_uri = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676769] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_mode = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.676964] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_model_extra_flags = 
[] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.677154] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_models = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.677332] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_power_governor_high = performance {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.677505] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_power_governor_low = powersave {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.677670] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_power_management = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.677843] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678072] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.device_detach_attempts = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678224] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.device_detach_timeout = 20 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678394] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.disk_cachemodes = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678556] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.disk_prefix = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678721] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.enabled_perf_events = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.678887] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.file_backed_memory = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679069] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.gid_maps = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679236] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.hw_disk_discard = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679400] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.hw_machine_type = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679571] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_rbd_ceph_conf = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679737] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.679900] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680085] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_rbd_glance_store_name = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680263] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_rbd_pool = rbd {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680440] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_type = default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680605] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.images_volume_group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680770] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.inject_key = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.680959] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.inject_partition = -2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681164] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.inject_password = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681335] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.iscsi_iface = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681501] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.iser_use_multipath = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681668] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_bandwidth = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681834] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.681998] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_downtime = 500 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682179] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682346] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682508] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_inbound_addr = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682674] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682836] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_permit_post_copy = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.682998] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_scheme = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.683189] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_timeout_action = abort {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.683362] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_tunnelled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.683527] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_uri = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.683694] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.live_migration_with_native_tls = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.683858] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.max_queues = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.684073] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.684329] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.684501] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.nfs_mount_options = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.684795] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.684974] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.685163] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_iser_scan_tries = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.685339] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_memory_encrypted_guests = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.685496] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.685663] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_pcie_ports = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.685832] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.num_volume_scan_tries = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.686006] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.pmem_namespaces = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.686182] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.quobyte_client_cfg = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.686467] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.686644] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rbd_connect_timeout = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.686812] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687046] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687235] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rbd_secret_uuid = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687401] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rbd_user = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687570] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687748] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.remote_filesystem_transport = ssh {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.687925] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rescue_image_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.688103] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rescue_kernel_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.688271] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rescue_ramdisk_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.688440] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.688601] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.rx_queue_size = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.688772] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.smbfs_mount_options = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689067] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689248] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.snapshot_compression = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689412] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.snapshot_image_format = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689635] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689804] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.sparse_logical_volumes = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.689969] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.swtpm_enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.690188] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.swtpm_group = tss {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.690370] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.swtpm_user = tss {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.690541] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.sysinfo_serial = unique {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.690703] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.tb_cache_size = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.690864] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.tx_queue_size = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691049] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.uid_maps = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691224] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.use_virtio_for_bridges = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691396] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.virt_type = kvm {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691570] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.volume_clear = zero {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691736] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.volume_clear_size = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.691904] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.volume_use_multipath = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.692079] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_cache_path = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.692256] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.692429] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_mount_group = qemu {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.692598] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_mount_opts = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.692770] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693090] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693291] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.vzstorage_mount_user = stack {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693465] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693645] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693819] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.auth_type = password {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.693983] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694164] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694330] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694491] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694682] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.connect_retry_delay = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694823] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.default_floating_pool = public {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.694978] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695157] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.extension_sync_interval = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695322] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.http_retries = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695483] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695640] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695796] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.695964] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.696183] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.696362] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.ovs_bridge = br-int {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.696533] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.physnets = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.696704] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.region_name = RegionOne {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.696865] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697076] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.service_metadata_proxy = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697248] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697420] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.service_type = network {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697586] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697746] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.697922] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698108] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698295] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698459] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] neutron.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698633] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] notifications.bdms_in_notifications = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698814] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] notifications.default_level = INFO {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.698988] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] notifications.notification_format = unversioned {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.699201] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] notifications.notify_on_state_change = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.699386] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.699563] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] pci.alias = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.699738] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] pci.device_spec = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.699905] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] pci.report_in_placement = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700099] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700278] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.auth_type = password {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700448] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700611] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700771] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.700937] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701111] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701274] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701436] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.default_domain_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701595] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.default_domain_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701751] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.domain_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.701911] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.domain_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702100] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a 
None None] placement.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702282] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702444] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702602] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702760] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.702927] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.password = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703115] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.project_domain_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703286] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.project_domain_name = Default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703455] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.project_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703628] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.project_name = service {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703797] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.region_name = RegionOne {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.703962] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704141] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704315] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.service_type = placement {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704480] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.split_loggers = False {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704640] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704808] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.704969] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.system_scope = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.705177] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.705346] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.trust_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.705524] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.user_domain_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.705675] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.user_domain_name = Default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.705835] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.user_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706016] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.username = nova {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706212] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706378] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] placement.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706562] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.cores = 20 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706734] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.count_usage_from_placement = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.706932] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
[ 467.707135] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.injected_file_content_bytes = 10240 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.707320] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.injected_file_path_length = 255 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.707489] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.injected_files = 5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.707656] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.instances = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.707827] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.key_pairs = 100 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708046] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.metadata_items = 128 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708236] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.ram = 51200 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708407] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.recheck_quota = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708580] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.server_group_members = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708749] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] quota.server_groups = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.708931] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709114] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709282] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.image_metadata_prefilter = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709446] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709612] env[63489]: DEBUG oslo_service.service 
[None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.max_attempts = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709777] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.max_placement_results = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.709944] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.710123] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.query_placement_for_image_type_support = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.710289] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.710464] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] scheduler.workers = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.710641] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.710814] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711058] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711266] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711443] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711611] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711779] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.711971] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.712162] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.host_subset_size = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.712332] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.712496] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.712662] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.712829] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.isolated_hosts = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713000] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.isolated_images = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713182] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713348] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713515] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713678] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.pci_in_placement = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.713841] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714038] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714223] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714388] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714554] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714718] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.714882] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.track_instance_changes = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.715075] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.715254] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metrics.required = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.715423] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metrics.weight_multiplier = 1.0 
{{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.715618] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.715758] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] metrics.weight_setting = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.716097] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.716283] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.716466] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.port_range = 10000:20000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.716640] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.716810] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717038] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] serial_console.serialproxy_port = 6083 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717234] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717415] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.auth_type = password {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717580] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717741] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.717905] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718081] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.insecure = False {{(pid=63489) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718243] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718414] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.send_service_user_token = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718578] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718737] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] service_user.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.718911] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.agent_enabled = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.719090] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.719427] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.719619] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.719792] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.html5proxy_port = 6082 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.719957] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.image_compression = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720135] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.jpeg_compression = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720296] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.playback_compression = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720460] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.require_secure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720628] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.server_listen = 127.0.0.1 {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720797] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.720955] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.streaming_mode = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721127] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] spice.zlib_compression = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721295] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] upgrade_levels.baseapi = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721465] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] upgrade_levels.compute = auto {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721627] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] upgrade_levels.conductor = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721787] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] upgrade_levels.scheduler = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.721955] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.auth_section = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722132] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.auth_type = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722293] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722453] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722612] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722772] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.722930] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.keyfile = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723104] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723263] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vendordata_dynamic_auth.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723438] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.api_retry_count = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723599] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.ca_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723769] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.cache_prefix = devstack-image-cache {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.723936] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.cluster_name = testcl1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.724114] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.connection_pool_size = 10 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.724275] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.console_delay_seconds = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.724441] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.datastore_regex = ^datastore.* {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.724643] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.724818] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.host_password = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725026] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.host_port = 443 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725221] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.host_username = administrator@vsphere.local {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725396] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.insecure = True {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725560] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.integration_bridge = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725730] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.maximum_objects = 100 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.725891] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.pbm_default_policy = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726070] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.pbm_enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726236] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.pbm_wsdl_location = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726406] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726570] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.serial_port_proxy_uri = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726729] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.serial_port_service_uri = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.726912] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.task_poll_interval = 0.5 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727106] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.use_linked_clone = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727284] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.vnc_keymap = en-us {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727456] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.vnc_port = 5900 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727621] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vmware.vnc_port_total = 10000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727809] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.auth_schemes = ['none'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.727986] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.728300] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.728486] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.728660] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.novncproxy_port = 6080 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.728838] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.server_listen = 127.0.0.1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729043] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729237] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.vencrypt_ca_certs = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729402] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.vencrypt_client_cert = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729562] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vnc.vencrypt_client_key = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729747] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.729913] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_deep_image_inspection = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730090] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730257] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730418] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730578] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.disable_rootwrap = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730740] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.enable_numa_live_migration = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.730901] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731075] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731240] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731403] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.libvirt_disable_apic = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731565] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731725] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.731885] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732062] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732228] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732389] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732546] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732704] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.732861] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733032] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733223] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733392] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.client_socket_timeout = 900 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733557] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.default_pool_size = 1000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733723] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.keep_alive = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.733889] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.max_header_line = 16384 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734062] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.secure_proxy_ssl_header = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734229] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.ssl_ca_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734393] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.ssl_cert_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734554] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.ssl_key_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734718] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.tcp_keepidle = 600 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.734907] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.735110] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] zvm.ca_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.735277] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] zvm.cloud_connector_url = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.735569] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.735747] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] zvm.reachable_timeout = 300 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.735931] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.enforce_new_defaults = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.736332] env[63489]: WARNING oslo_config.cfg [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
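The WARNING above is oslo.config's standard notice for an option flagged deprecated_for_removal: [oslo_policy] enforce_scope resolves to True here and the one-time deprecation message is emitted when a value for such an option is loaded and read (log_opt_values reads every option, which is why the notice lands in the middle of the dump). Below is a minimal sketch of that mechanism, assuming a standalone script whose option definition merely mirrors the logged one rather than Nova's actual registration.

    # Minimal sketch, not Nova's real option registration: a BoolOpt marked
    # deprecated_for_removal, set in a small config file and then read, which is
    # what makes oslo.config log a "deprecated for removal" warning like the one above.
    import logging
    import tempfile

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)

    conf = cfg.ConfigOpts()
    conf.register_opts(
        [cfg.BoolOpt(
            'enforce_scope',
            default=True,
            deprecated_for_removal=True,
            deprecated_reason='OpenStack will always enforce scope checks.')],
        group='oslo_policy',
    )

    # Set the option explicitly, as this deployment's nova.conf appears to do.
    with tempfile.NamedTemporaryFile('w', suffix='.conf', delete=False) as f:
        f.write('[oslo_policy]\nenforce_scope = True\n')
        config_file = f.name

    conf(args=[], project='nova', default_config_files=[config_file])
    print(conf.oslo_policy.enforce_scope)  # first access logs the deprecation warning

Per the message itself, scope checks will always be enforced and the option is slated for removal in the 2025.2 cycle, so the explicit setting can eventually be dropped from the configuration.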
[ 467.736524] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.enforce_scope = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.736705] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.policy_default_rule = default {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.736892] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737117] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.policy_file = policy.yaml {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737311] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737479] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737642] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737804] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.737970] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.738157] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.738336] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.738515] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.connection_string = messaging:// {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.738687] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.enabled = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.738857] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.es_doc_type = notification 
{{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739036] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.es_scroll_size = 10000 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739215] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.es_scroll_time = 2m {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739382] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.filter_error_trace = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739553] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.hmac_keys = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739722] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.sentinel_service_name = mymaster {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.739889] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.socket_timeout = 0.1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740067] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.trace_requests = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740233] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler.trace_sqlalchemy = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740418] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler_jaeger.process_tags = {} {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740580] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler_jaeger.service_name_prefix = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740747] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] profiler_otlp.service_name_prefix = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.740916] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] remote_debug.host = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.741127] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] remote_debug.port = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.741334] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.741505] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.741674] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.741840] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742011] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742191] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742356] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742522] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742684] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.742856] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743026] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743208] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743379] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743550] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743723] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.743894] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744073] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744255] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744422] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744587] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744754] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.744936] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745158] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745301] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745467] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745631] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745797] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.745961] env[63489]: DEBUG oslo_service.service [None 
req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.746146] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.746316] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.746490] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.746662] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.746828] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747034] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747221] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.ssl_version = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747390] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747578] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747750] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_notifications.retry = -1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.747939] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748130] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_messaging_notifications.transport_url = **** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748308] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.auth_section = None {{(pid=63489) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748474] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.auth_type = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748637] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.cafile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748797] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.certfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.748967] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.collect_timing = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749178] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.connect_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749347] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.connect_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749510] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.endpoint_id = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749670] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.endpoint_override = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749833] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.insecure = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.749992] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.keyfile = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750166] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.max_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750326] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.min_version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750483] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.region_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750645] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.retriable_status_codes = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750805] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.service_name = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.750965] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.service_type = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751142] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.split_loggers = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751303] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.status_code_retries = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751464] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.status_code_retry_delay = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751622] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.timeout = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751781] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.valid_interfaces = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.751940] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_limit.version = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752119] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_reports.file_event_handler = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752285] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752444] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] oslo_reports.log_dir = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752614] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752774] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.752935] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753137] 
env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753328] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753491] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753664] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753824] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.753985] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.754174] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.754339] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.754497] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] vif_plug_ovs_privileged.user = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.754665] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.flat_interface = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.754843] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755027] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755210] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755380] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755547] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755717] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.755882] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756077] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756257] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.isolate_vif = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756431] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756600] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756774] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.756978] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.ovsdb_interface = native {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.757166] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_vif_ovs.per_port_bridge = False {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.757339] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_brick.lock_path = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.757507] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.757671] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.757847] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None 
None] privsep_osbrick.capabilities = [21] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758039] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] privsep_osbrick.group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758221] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] privsep_osbrick.helper_command = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758398] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758568] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758732] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] privsep_osbrick.user = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.758908] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759086] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.group = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759252] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.helper_command = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759421] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759587] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759748] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] nova_sys_admin.user = None {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 467.759880] env[63489]: DEBUG oslo_service.service [None req-e910c992-230d-48d1-a34e-fd66f048243a None None] ******************************************************************************** {{(pid=63489) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 467.760390] env[63489]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 468.263459] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Getting list of instances from cluster (obj){ [ 468.263459] env[63489]: value = "domain-c8" [ 468.263459] env[63489]: 
_type = "ClusterComputeResource" [ 468.263459] env[63489]: } {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 468.264657] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b9488c-e997-46d6-b3b4-9dc2c1a766e5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.273370] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Got total of 0 instances {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 468.273909] env[63489]: WARNING nova.virt.vmwareapi.driver [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 468.274406] env[63489]: INFO nova.virt.node [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Generated node identity 6b569bc3-63ff-4af7-bc85-277940cdadde [ 468.274645] env[63489]: INFO nova.virt.node [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Wrote node identity 6b569bc3-63ff-4af7-bc85-277940cdadde to /opt/stack/data/n-cpu-1/compute_id [ 468.777395] env[63489]: WARNING nova.compute.manager [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Compute nodes ['6b569bc3-63ff-4af7-bc85-277940cdadde'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 469.782665] env[63489]: INFO nova.compute.manager [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 470.788788] env[63489]: WARNING nova.compute.manager [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 470.789180] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.789324] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 470.789516] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 470.789636] env[63489]: DEBUG nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63489) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 470.790591] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f21ded0-da33-41e5-94d6-c1a365010ea9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.799165] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af97ef2-a3ca-4209-9738-f0b048fd400a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.812868] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccfad74-5302-48a2-bef3-85d46dcef6cb {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.819491] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04720cdb-b831-4230-b999-cf468fb5eb35 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 470.848916] env[63489]: DEBUG nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181598MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63489) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 470.849192] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 470.849455] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 471.353680] env[63489]: WARNING 
nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] No compute node record for cpu-1:6b569bc3-63ff-4af7-bc85-277940cdadde: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 6b569bc3-63ff-4af7-bc85-277940cdadde could not be found. [ 471.855572] env[63489]: INFO nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 6b569bc3-63ff-4af7-bc85-277940cdadde [ 473.363110] env[63489]: DEBUG nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63489) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 473.363482] env[63489]: DEBUG nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63489) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 473.515321] env[63489]: INFO nova.scheduler.client.report [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] [req-b9df0506-0e2c-48a9-b439-730a51b6f32d] Created resource provider record via placement API for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 473.533204] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9111aebd-1a4c-4819-8ec6-bb980ae00435 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.540757] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f381219d-acec-4972-ace3-651ee6152ba6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.570301] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5f4623-35be-4605-888e-5d68d4da2138 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.576938] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883ac479-8538-4798-8c41-d3e2d39e7356 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 473.589552] env[63489]: DEBUG nova.compute.provider_tree [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 474.125675] env[63489]: DEBUG nova.scheduler.client.report [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Updated inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 474.125909] env[63489]: DEBUG nova.compute.provider_tree [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Updating resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde generation from 0 to 1 during operation: update_inventory {{(pid=63489) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 474.126062] env[63489]: DEBUG nova.compute.provider_tree [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 474.174780] env[63489]: DEBUG nova.compute.provider_tree [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Updating resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde generation from 1 to 2 during operation: update_traits {{(pid=63489) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 474.680063] env[63489]: DEBUG nova.compute.resource_tracker [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63489) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 474.680448] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.831s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 474.680570] env[63489]: DEBUG nova.service [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Creating RPC server for service compute {{(pid=63489) start /opt/stack/nova/nova/service.py:186}} [ 474.695757] env[63489]: DEBUG nova.service [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] Join ServiceGroup membership for this service compute {{(pid=63489) start /opt/stack/nova/nova/service.py:203}} [ 474.696039] env[63489]: DEBUG nova.servicegroup.drivers.db [None req-a634fe7f-27cf-4c6e-a8a9-b9225c2578bf None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63489) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 500.700139] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._sync_power_states {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 501.205371] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Getting list of instances from cluster (obj){ [ 501.205371] env[63489]: value = 
"domain-c8" [ 501.205371] env[63489]: _type = "ClusterComputeResource" [ 501.205371] env[63489]: } {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 501.206649] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a59c7f-fd35-4610-bf0c-7088b0bdb150 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.214818] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Got total of 0 instances {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 501.215071] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 501.215377] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Getting list of instances from cluster (obj){ [ 501.215377] env[63489]: value = "domain-c8" [ 501.215377] env[63489]: _type = "ClusterComputeResource" [ 501.215377] env[63489]: } {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 501.216314] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac61579a-ebf0-465c-9838-285b30d638f2 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.224386] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Got total of 0 instances {{(pid=63489) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 511.025548] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquiring lock "a577fdfb-711e-4f8d-b111-80e093374fc4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 511.025861] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "a577fdfb-711e-4f8d-b111-80e093374fc4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.531140] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Starting instance... 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.089630] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.089880] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.092802] env[63489]: INFO nova.compute.claims [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.788520] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquiring lock "9bb676ad-a339-4a05-9a66-3bb817543156" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.788520] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "9bb676ad-a339-4a05-9a66-3bb817543156" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.207222] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3666263-751c-43d1-a32b-419004682b9b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.218817] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquiring lock "44c8d268-4c23-4b85-915d-3c708586046e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.219061] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "44c8d268-4c23-4b85-915d-3c708586046e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.220978] env[63489]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee32588f-b309-4b27-b134-9f7a90e8f09d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.258023] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f895191c-8685-454d-ab67-c3e90a0185da {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.266632] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310c8110-dcee-4f2d-881c-7aa4d78b0822 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.281421] env[63489]: DEBUG nova.compute.provider_tree [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 513.290636] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 513.557197] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "013f2d36-9578-45d2-aff0-170b5fd97506" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.557447] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "013f2d36-9578-45d2-aff0-170b5fd97506" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.567356] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.567811] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.725143] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 
tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 513.785484] env[63489]: DEBUG nova.scheduler.client.report [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 513.816100] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.059757] env[63489]: DEBUG nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 514.069759] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 514.250064] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.295885] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.301316] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 514.302858] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.487s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.304315] env[63489]: INFO nova.compute.claims [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.411695] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "5373362e-671e-44f4-8c0b-8396a3925c12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.411882] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "5373362e-671e-44f4-8c0b-8396a3925c12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.598814] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.598814] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquiring lock "5e32d6c0-d943-416f-9a54-e3511c933ca9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.598814] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "5e32d6c0-d943-416f-9a54-e3511c933ca9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.603806] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.808781] env[63489]: DEBUG nova.compute.utils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 514.814499] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 514.814499] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 514.913961] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.004021] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquiring lock "276c32a2-da0f-420f-a9f2-b13c1fd62586" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.004294] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "276c32a2-da0f-420f-a9f2-b13c1fd62586" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.105426] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Starting instance... 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.241909] env[63489]: DEBUG nova.policy [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b1032b0530334282a305fa14e9c9bbd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4328cd9ac32e4510a8531dd208cb0f86', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 515.320289] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 515.445175] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.483234] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f9ecdb-4fe3-40c9-8435-8de197070483 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.492758] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151acea6-6c1d-4bc7-b373-453941fe9310 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.527473] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Starting instance... 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.531641] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3284f7c4-3134-4572-a4eb-4c2a7bb9f149 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.539492] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32da34ff-7e64-46bc-8a55-1ea15a2458ac {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.558803] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.634732] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.054081] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.064629] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 516.270212] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Successfully created port: 53fc1a40-c521-4345-98f2-e863cab2017d {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.337219] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Start spawning the instance on the hypervisor. 
{{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 516.374961] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 516.375111] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 516.377084] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 516.377084] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 516.377084] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 516.377084] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 516.377319] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 516.377492] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 516.379292] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 516.379292] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 516.379292] env[63489]: DEBUG nova.virt.hardware [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 516.379292] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef04a81-57d2-414d-b68b-cc3ac91c398b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.391963] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67701ee2-099e-4dd7-bb04-189e2a1e8602 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.412103] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd90f9f-9ac6-4737-9279-8bc2f059e1df {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.574428] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.575806] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 516.579971] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.330s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.581905] env[63489]: INFO nova.compute.claims [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.088873] env[63489]: DEBUG nova.compute.utils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 517.093298] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 517.093566] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 517.318522] env[63489]: DEBUG nova.policy [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6195c5a6a1664d7e96aec4f615cbf714', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eae1632ae19e46689c256fc504567258', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 517.594820] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Start building block device mappings for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 517.761639] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eeccb98-6c74-431b-a4b8-70213f608b11 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.770512] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad20b8f4-08df-43d5-881e-a3471b14be52 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.808102] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c72bb2-3a6f-416e-a90d-a2792973dc7a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.815816] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5cb243-68ba-4ecf-9d07-3b5269b89131 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.829808] env[63489]: DEBUG nova.compute.provider_tree [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.333010] env[63489]: DEBUG nova.scheduler.client.report [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.605244] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Successfully created port: c3ce6b67-3b42-4c01-9783-0ed5859bffd4 {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 518.612334] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Start spawning the instance on the hypervisor. 
{{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 518.650741] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 518.651410] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 518.651410] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 518.651410] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 518.651636] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 518.651636] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 518.652256] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 518.652256] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 518.652256] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 518.652345] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 518.653130] env[63489]: DEBUG nova.virt.hardware [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 518.653787] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081bdf89-644e-4ec9-b30c-fd8c191959f9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.661769] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1030d1c-c7f1-4cd4-9733-a2c7d9e5a5b1 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.846943] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.847504] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 518.852765] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.257s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.854240] env[63489]: INFO nova.compute.claims [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.362459] env[63489]: DEBUG nova.compute.utils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.371987] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.371987] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 519.581761] env[63489]: DEBUG nova.policy [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '048bf691170847098ebdc4236554d487', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6016c0281372437ab06c1fea32fed4c8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.841612] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquiring lock "81e42a2c-fb30-42e2-a2a9-45f3184739e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.841612] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Lock "81e42a2c-fb30-42e2-a2a9-45f3184739e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
519.872026] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 520.080980] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a204e60-9f40-469a-b02d-dc3d76b15249 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.089458] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dea4564-30d5-432e-8088-a318481b75ac {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.135222] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ffb2cb-83b6-4898-9444-e628ba343d43 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.142378] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f13f1c-0af8-4fe1-ae46-62365556bdf0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.157693] env[63489]: DEBUG nova.compute.provider_tree [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.344905] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Starting instance... 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 520.662811] env[63489]: DEBUG nova.scheduler.client.report [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 520.884158] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.885347] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 520.924491] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.924628] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.924745] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.925266] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Flavor pref 0:0:0 
{{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.926040] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.926040] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 520.926040] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.926040] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.927625] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.927625] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.927625] env[63489]: DEBUG nova.virt.hardware [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.927625] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805aaea9-f206-4d64-a233-97dfc9048509 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.938089] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59783c51-479c-41b2-9e64-2475ed926e41 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.979426] env[63489]: ERROR nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. 
[ 520.979426] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 520.979426] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.979426] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.979426] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.979426] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.979426] env[63489]: ERROR nova.compute.manager raise self.value [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.979426] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 520.979426] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.979426] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 520.979929] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.979929] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 520.979929] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. 
[ 520.979929] env[63489]: ERROR nova.compute.manager [ 520.979929] env[63489]: Traceback (most recent call last): [ 520.980551] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 520.980551] env[63489]: listener.cb(fileno) [ 520.980551] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.980551] env[63489]: result = function(*args, **kwargs) [ 520.980551] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.980551] env[63489]: return func(*args, **kwargs) [ 520.980551] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 520.980551] env[63489]: raise e [ 520.980551] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 520.980551] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 520.980551] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.980551] env[63489]: created_port_ids = self._update_ports_for_instance( [ 520.980551] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.980551] env[63489]: with excutils.save_and_reraise_exception(): [ 520.980551] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.980551] env[63489]: self.force_reraise() [ 520.980551] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.980551] env[63489]: raise self.value [ 520.980551] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.980551] env[63489]: updated_port = self._update_port( [ 520.980551] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.980551] env[63489]: _ensure_no_port_binding_failure(port) [ 520.980551] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.980551] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 520.980551] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. [ 520.980551] env[63489]: Removing descriptor: 15 [ 520.982146] env[63489]: ERROR nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. 
[ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Traceback (most recent call last): [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] yield resources [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.driver.spawn(context, instance, image_meta, [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] vm_ref = self.build_virtual_machine(instance, [ 520.982146] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] vif_infos = vmwarevif.get_vif_info(self._session, [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] for vif in network_info: [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self._sync_wrapper(fn, *args, **kwargs) [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.wait() [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self[:] = self._gt.wait() [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self._exit_event.wait() [ 520.982503] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 520.982503] env[63489]: ERROR 
nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] result = hub.switch() [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self.greenlet.switch() [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] result = function(*args, **kwargs) [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return func(*args, **kwargs) [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise e [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] nwinfo = self.network_api.allocate_for_instance( [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] created_port_ids = self._update_ports_for_instance( [ 520.982881] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] with excutils.save_and_reraise_exception(): [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.force_reraise() [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise self.value [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] updated_port = self._update_port( [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.983792] 
env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] _ensure_no_port_binding_failure(port) [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise exception.PortBindingFailed(port_id=port['id']) [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. [ 520.983792] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] [ 520.984386] env[63489]: INFO nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Terminating instance [ 520.985679] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquiring lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 520.985908] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquired lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 520.986134] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 521.174920] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.174920] env[63489]: DEBUG nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.177536] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.574s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.178997] env[63489]: INFO nova.compute.claims [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 521.478067] env[63489]: DEBUG nova.compute.manager [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Received event network-changed-53fc1a40-c521-4345-98f2-e863cab2017d {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 521.478805] env[63489]: DEBUG nova.compute.manager [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Refreshing instance network info cache due to event network-changed-53fc1a40-c521-4345-98f2-e863cab2017d. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 521.478805] env[63489]: DEBUG oslo_concurrency.lockutils [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] Acquiring lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.539918] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 521.571931] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Successfully created port: 863ad12f-85ef-4ee7-a683-03416fb6d240 {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.686199] env[63489]: DEBUG nova.compute.utils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 521.691430] env[63489]: DEBUG nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Not allocating networking since 'none' was specified. 
{{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 521.857688] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.035242] env[63489]: ERROR nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 522.035242] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.035242] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.035242] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.035242] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.035242] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.035242] env[63489]: ERROR nova.compute.manager raise self.value [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.035242] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 522.035242] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.035242] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 522.035982] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.035982] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 522.035982] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. 
[ 522.035982] env[63489]: ERROR nova.compute.manager [ 522.035982] env[63489]: Traceback (most recent call last): [ 522.035982] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 522.035982] env[63489]: listener.cb(fileno) [ 522.035982] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.035982] env[63489]: result = function(*args, **kwargs) [ 522.035982] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.035982] env[63489]: return func(*args, **kwargs) [ 522.035982] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 522.035982] env[63489]: raise e [ 522.035982] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.035982] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 522.035982] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.035982] env[63489]: created_port_ids = self._update_ports_for_instance( [ 522.035982] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.035982] env[63489]: with excutils.save_and_reraise_exception(): [ 522.035982] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.035982] env[63489]: self.force_reraise() [ 522.035982] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.035982] env[63489]: raise self.value [ 522.035982] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.035982] env[63489]: updated_port = self._update_port( [ 522.035982] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.035982] env[63489]: _ensure_no_port_binding_failure(port) [ 522.035982] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.035982] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 522.037690] env[63489]: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 522.037690] env[63489]: Removing descriptor: 16 [ 522.037690] env[63489]: ERROR nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. 
[ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Traceback (most recent call last): [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] yield resources [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.driver.spawn(context, instance, image_meta, [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 522.037690] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] vm_ref = self.build_virtual_machine(instance, [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] vif_infos = vmwarevif.get_vif_info(self._session, [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] for vif in network_info: [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self._sync_wrapper(fn, *args, **kwargs) [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.wait() [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self[:] = self._gt.wait() [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self._exit_event.wait() [ 522.038258] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 522.038685] env[63489]: ERROR 
nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] result = hub.switch() [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self.greenlet.switch() [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] result = function(*args, **kwargs) [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return func(*args, **kwargs) [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise e [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] nwinfo = self.network_api.allocate_for_instance( [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.038685] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] created_port_ids = self._update_ports_for_instance( [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] with excutils.save_and_reraise_exception(): [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.force_reraise() [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise self.value [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] updated_port = self._update_port( [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.039058] 
env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] _ensure_no_port_binding_failure(port) [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.039058] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise exception.PortBindingFailed(port_id=port['id']) [ 522.039377] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 522.039377] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] [ 522.039377] env[63489]: INFO nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Terminating instance [ 522.046152] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquiring lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.046152] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquired lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.046152] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 522.191921] env[63489]: DEBUG nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.367667] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Releasing lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.368147] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 522.368339] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 522.368647] env[63489]: DEBUG oslo_concurrency.lockutils [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] Acquired lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.368810] env[63489]: DEBUG nova.network.neutron [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Refreshing network info cache for port 53fc1a40-c521-4345-98f2-e863cab2017d {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 522.369893] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90fa4d18-561d-46f9-a519-3a3d4cfc9f33 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.381710] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3dd90e-4ecf-4846-af40-03ec2eedbec5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.421771] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a577fdfb-711e-4f8d-b111-80e093374fc4 could not be found. [ 522.421849] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 522.422341] env[63489]: INFO nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 522.422677] env[63489]: DEBUG oslo.service.loopingcall [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.422876] env[63489]: DEBUG nova.compute.manager [-] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 522.422953] env[63489]: DEBUG nova.network.neutron [-] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 522.460285] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8601dfed-dbe8-4b91-80ca-c3e4ee680ba7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.472184] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9470eecd-2e66-476d-a65b-629d83072cd3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.477955] env[63489]: DEBUG nova.network.neutron [-] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.523907] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac367fe6-b0e1-400e-9e84-cf5b5cb684f9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.534083] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff2b87c-8ee3-4359-982e-28206f789fe0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.553529] env[63489]: DEBUG nova.compute.provider_tree [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 522.602467] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.763105] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.895286] env[63489]: DEBUG nova.network.neutron [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 522.982475] env[63489]: DEBUG nova.network.neutron [-] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.048979] env[63489]: DEBUG nova.network.neutron [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.050275] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Acquiring lock "bd337b87-0c9e-44eb-81bf-572610ac5680" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.050492] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Lock "bd337b87-0c9e-44eb-81bf-572610ac5680" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.057213] env[63489]: DEBUG nova.scheduler.client.report [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 523.182186] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.182518] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.182797] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Starting heal instance info cache {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 523.182889] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Rebuilding the list of instances to heal {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 523.209140] env[63489]: DEBUG nova.compute.manager [None 
req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.245897] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.245988] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.246203] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.246321] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.246792] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.246792] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.246893] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.246952] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 
tempest-ServersAdmin275Test-2131968261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.247843] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.248076] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.248842] env[63489]: DEBUG nova.virt.hardware [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.251582] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c850c6ad-9179-47bc-969f-3cef680c7616 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.266199] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443dc9d3-24c3-4598-891c-f3331fa911a4 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.272110] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Releasing lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.272994] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 523.272994] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 523.273112] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a7e4301-9a92-433f-b013-022f901f1082 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.291773] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance VIF info [] {{(pid=63489) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 523.304227] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63489) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 523.305387] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18d27a9d-6777-4078-b1da-fe634c6f8a33 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.313496] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ed671-6cab-4df3-8090-767a070ff1e5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.326147] env[63489]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 523.326526] env[63489]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63489) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 523.327507] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=63489) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 523.327646] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating folder: Project (b4a8676c9048405687b97c36c575ae98). Parent ref: group-v232129. 
{{(pid=63489) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 523.328252] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78a429f2-6ed6-46a6-b24b-da7b85a5e8c8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.339229] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9bb676ad-a339-4a05-9a66-3bb817543156 could not be found. [ 523.339424] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 523.339617] env[63489]: INFO nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Took 0.07 seconds to destroy the instance on the hypervisor. [ 523.339861] env[63489]: DEBUG oslo.service.loopingcall [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.341376] env[63489]: DEBUG nova.compute.manager [-] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 523.341475] env[63489]: DEBUG nova.network.neutron [-] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 523.343223] env[63489]: INFO nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Created folder: Project (b4a8676c9048405687b97c36c575ae98) in parent group-v232129. [ 523.343387] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating folder: Instances. Parent ref: group-v232132. {{(pid=63489) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 523.343611] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78d361b6-948a-4961-84e9-66e05d4337e1 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.352554] env[63489]: INFO nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Created folder: Instances in parent group-v232132. 
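The Folder.CreateFolder calls above show vCenter answering with a DuplicateName SOAP fault (reported over HTTP 200, hence the suds warning) when the OpenStack folder already exists, and the driver treating that as success before moving on to create the Project and Instances folders. Below is a minimal sketch of that idempotent pattern, not Nova's actual vm_util.create_folder: it assumes an oslo_vmware.api.VMwareAPISession named `session`, assumes oslo_vmware.exceptions exposes a DuplicateName class for this fault, and the helper name `ensure_folder` is made up for illustration.

```python
# Sketch only: tolerate an already-existing vCenter folder the way the log
# above does. Assumptions: `session` is an oslo_vmware.api.VMwareAPISession
# and oslo_vmware.exceptions maps the DuplicateName fault to an exception.
from oslo_vmware import exceptions as vexc


def ensure_folder(session, parent_ref, name):
    """Create folder `name` under `parent_ref`; an existing one is not an error."""
    try:
        # Folder.CreateFolder returns the managed object reference of the
        # newly created child folder.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # A folder of the same name already exists, which is fine for a
        # shared inventory directory; carry on without a fresh reference
        # (a caller that needs the ref can look the child folder up).
        return None
```

Treating DuplicateName as success keeps the several concurrent tempest builds visible in this log from failing when they race to create the shared OpenStack/Project/Instances folder hierarchy.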
[ 523.352754] env[63489]: DEBUG oslo.service.loopingcall [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.352939] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Creating VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 523.353151] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c18c529c-9cab-4883-bb50-53c6b03cd712 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.380813] env[63489]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 523.380813] env[63489]: value = "task-1050188" [ 523.380813] env[63489]: _type = "Task" [ 523.380813] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.387317] env[63489]: DEBUG nova.network.neutron [-] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.396138] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050188, 'name': CreateVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.488853] env[63489]: INFO nova.compute.manager [-] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Took 1.06 seconds to deallocate network for instance. [ 523.493971] env[63489]: DEBUG nova.compute.claims [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 523.494343] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.558080] env[63489]: DEBUG oslo_concurrency.lockutils [req-59934a15-eef5-43b0-8d60-89cb274d956a req-7cc9adfe-f9a0-4706-988e-3ecd909cdd6a service nova] Releasing lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 523.565208] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Starting instance... 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 523.568968] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.569818] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Start building networks asynchronously for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 523.575019] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.130s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.576641] env[63489]: INFO nova.compute.claims [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Skipping network cache update for instance because it is Building. {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Skipping network cache update for instance because it is Building. {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Skipping network cache update for instance because it is Building. {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Skipping network cache update for instance because it is Building. {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Skipping network cache update for instance because it is Building. {{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 523.692022] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Didn't find any instances for network info cache update. 
{{(pid=63489) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 523.692452] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.692647] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.693610] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.693610] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.694650] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.695728] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.695728] env[63489]: DEBUG nova.compute.manager [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63489) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 523.695728] env[63489]: DEBUG oslo_service.periodic_task [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Running periodic task ComputeManager.update_available_resource {{(pid=63489) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.893599] env[63489]: DEBUG nova.network.neutron [-] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.894786] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050188, 'name': CreateVM_Task, 'duration_secs': 0.287197} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.894950] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Created VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 523.896128] env[63489]: DEBUG oslo_vmware.service [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6c3a0f-f0a9-4157-93a6-41f36227ddc7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.904504] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.904824] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.905414] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 523.905670] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee244007-6ea7-4723-989c-636e3ebd7fc6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.914057] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 523.914057] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52abf53e-3716-e3e9-32a5-73ecf7ac6468" [ 523.914057] env[63489]: _type = "Task" [ 523.914057] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.923133] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52abf53e-3716-e3e9-32a5-73ecf7ac6468, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.084888] env[63489]: DEBUG nova.compute.utils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 524.088984] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 524.091023] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 524.109789] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.198922] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.230875] env[63489]: DEBUG nova.compute.manager [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Received event network-vif-deleted-53fc1a40-c521-4345-98f2-e863cab2017d {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 524.231114] env[63489]: DEBUG nova.compute.manager [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Received event network-changed-c3ce6b67-3b42-4c01-9783-0ed5859bffd4 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 524.231288] env[63489]: DEBUG nova.compute.manager [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Refreshing instance network info cache due to event network-changed-c3ce6b67-3b42-4c01-9783-0ed5859bffd4. 
{{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 524.231503] env[63489]: DEBUG oslo_concurrency.lockutils [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] Acquiring lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.231635] env[63489]: DEBUG oslo_concurrency.lockutils [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] Acquired lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.231792] env[63489]: DEBUG nova.network.neutron [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Refreshing network info cache for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4 {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 524.396978] env[63489]: INFO nova.compute.manager [-] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Took 1.05 seconds to deallocate network for instance. [ 524.399456] env[63489]: DEBUG nova.compute.claims [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 524.399456] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.430282] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.430529] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Processing image 314454d0-cea8-4ac2-8a2e-d19d8731016c {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 524.431417] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.432718] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.432718] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 524.432718] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2fa72f1-51c3-4183-a41c-83dfc8b02bbf {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.451351] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 524.451918] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63489) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 524.452445] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73093346-d666-45ed-9c92-89dc0e5cd64a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.459693] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8282887-7a8c-4df0-a847-9126283d804e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.467187] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 524.467187] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52504515-52ea-5cae-3fcd-8da23558d08c" [ 524.467187] env[63489]: _type = "Task" [ 524.467187] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.478099] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52504515-52ea-5cae-3fcd-8da23558d08c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.538410] env[63489]: DEBUG nova.policy [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5d00d2caab3b43899e9255c199d6c6e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e79e2ab3555e4c729e015aa151846115', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 524.588921] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 524.785108] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4592f89f-574a-4c0e-a457-c02e24444034 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.793404] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f98814-bcf1-4bda-8024-425b99ac7b98 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.833387] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e193e798-39fd-4864-aa88-ca43d5f76074 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.836977] env[63489]: DEBUG nova.network.neutron [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.842817] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e504d5-1e78-49d1-a626-fde14fa861c7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.857709] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.981293] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Preparing fetch location {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 524.981806] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating directory with path [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 524.984159] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df8a5b7c-37c4-4884-a5ad-10c7ac78e33a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.004685] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Created directory with path [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 525.004685] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Fetch image to [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 525.004685] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Downloading image file data 314454d0-cea8-4ac2-8a2e-d19d8731016c to [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk on the data store datastore2 {{(pid=63489) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 525.004685] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20b34e7-742e-4cae-92c4-500f6635b5b0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.014846] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-aa736377-93ba-4bad-9ed2-bdaf2a5247f2 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.027162] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd9a4a6-33cc-42cb-aefb-dd3aeb9e0937 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.070767] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051bf80c-9e93-4667-a3e1-11972c9d1c77 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.083207] env[63489]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-40a6260d-f951-4113-8aa6-46dcb24df32a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.114103] env[63489]: DEBUG nova.virt.vmwareapi.images [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Downloading image file data 314454d0-cea8-4ac2-8a2e-d19d8731016c to the data store datastore2 {{(pid=63489) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 525.210658] env[63489]: DEBUG oslo_vmware.rw_handles [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=63489) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 525.302669] env[63489]: DEBUG nova.network.neutron [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.360972] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 525.602683] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Start spawning the instance on the hypervisor. 
{{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 525.671533] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Acquiring lock "112519dc-e533-4428-a235-5bef1fd4acae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.671533] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Lock "112519dc-e533-4428-a235-5bef1fd4acae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.751633] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 525.751906] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 525.752063] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 525.752246] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 525.752416] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 525.752529] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 
tempest-DeleteServersAdminTestJSON-2010203047-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 525.752757] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 525.752888] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 525.753172] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 525.753390] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 525.753569] env[63489]: DEBUG nova.virt.hardware [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 525.754468] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2481b266-dae2-4120-99d1-9d6ccdbdc82b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.766980] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464c1176-f71d-4401-b950-7a06770b334a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.803901] env[63489]: DEBUG oslo_concurrency.lockutils [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] Releasing lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.804371] env[63489]: DEBUG nova.compute.manager [req-f4749e27-8845-488f-9a72-82a2461f7943 req-112b4de9-d993-4d96-82ca-a30d542afc0f service nova] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Received event network-vif-deleted-c3ce6b67-3b42-4c01-9783-0ed5859bffd4 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 525.866128] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.866796] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Start building networks asynchronously for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 525.869696] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.235s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.871163] env[63489]: INFO nova.compute.claims [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.906161] env[63489]: DEBUG oslo_vmware.rw_handles [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Completed reading data from the image iterator. {{(pid=63489) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 525.906161] env[63489]: DEBUG oslo_vmware.rw_handles [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=63489) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 526.054111] env[63489]: DEBUG nova.virt.vmwareapi.images [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Downloaded image file data 314454d0-cea8-4ac2-8a2e-d19d8731016c to vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk on the data store datastore2 {{(pid=63489) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 526.056397] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Caching image {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 526.056397] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copying Virtual Disk [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk to [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 526.056397] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93a419bc-2d39-44fb-85d3-0ca4c830a3da {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.070989] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 526.070989] env[63489]: value = "task-1050189" [ 526.070989] env[63489]: _type = "Task" [ 526.070989] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.082740] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.375872] env[63489]: DEBUG nova.compute.utils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 526.381057] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Allocating IP information in the background. 
{{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 526.381322] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 526.442590] env[63489]: ERROR nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. [ 526.442590] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.442590] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.442590] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.442590] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.442590] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.442590] env[63489]: ERROR nova.compute.manager raise self.value [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.442590] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 526.442590] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.442590] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 526.443052] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.443052] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 526.443052] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. 
[ 526.443052] env[63489]: ERROR nova.compute.manager [ 526.443052] env[63489]: Traceback (most recent call last): [ 526.443052] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 526.443052] env[63489]: listener.cb(fileno) [ 526.443052] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.443052] env[63489]: result = function(*args, **kwargs) [ 526.443052] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.443052] env[63489]: return func(*args, **kwargs) [ 526.443052] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.443052] env[63489]: raise e [ 526.443052] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.443052] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 526.443052] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.443052] env[63489]: created_port_ids = self._update_ports_for_instance( [ 526.443052] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.443052] env[63489]: with excutils.save_and_reraise_exception(): [ 526.443052] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.443052] env[63489]: self.force_reraise() [ 526.443052] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.443052] env[63489]: raise self.value [ 526.443052] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.443052] env[63489]: updated_port = self._update_port( [ 526.443052] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.443052] env[63489]: _ensure_no_port_binding_failure(port) [ 526.443052] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.443052] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 526.443768] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. [ 526.443768] env[63489]: Removing descriptor: 17 [ 526.446955] env[63489]: ERROR nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. 
[ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Traceback (most recent call last): [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] yield resources [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.driver.spawn(context, instance, image_meta, [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] vm_ref = self.build_virtual_machine(instance, [ 526.446955] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] for vif in network_info: [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self._sync_wrapper(fn, *args, **kwargs) [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.wait() [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self[:] = self._gt.wait() [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self._exit_event.wait() [ 526.447468] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.447468] env[63489]: ERROR 
nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] result = hub.switch() [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self.greenlet.switch() [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] result = function(*args, **kwargs) [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return func(*args, **kwargs) [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise e [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] nwinfo = self.network_api.allocate_for_instance( [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] created_port_ids = self._update_ports_for_instance( [ 526.447946] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] with excutils.save_and_reraise_exception(): [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.force_reraise() [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise self.value [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] updated_port = self._update_port( [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.448316] 
env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] _ensure_no_port_binding_failure(port) [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise exception.PortBindingFailed(port_id=port['id']) [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. [ 526.448316] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] [ 526.448892] env[63489]: INFO nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Terminating instance [ 526.450333] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquiring lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.450521] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquired lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.450717] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 526.581213] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050189, 'name': CopyVirtualDisk_Task} progress is 70%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.697988] env[63489]: DEBUG nova.policy [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a20543825e02414c9d8291911d9eb171', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b8c7246bc5547cc80a9618e414f3e9d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 526.815990] env[63489]: DEBUG nova.compute.manager [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Received event network-changed-863ad12f-85ef-4ee7-a683-03416fb6d240 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 526.815990] env[63489]: DEBUG nova.compute.manager [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Refreshing instance network info cache due to event network-changed-863ad12f-85ef-4ee7-a683-03416fb6d240. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 526.815990] env[63489]: DEBUG oslo_concurrency.lockutils [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] Acquiring lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.888800] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.043494] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.089370] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050189, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.090761] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Successfully created port: 7476d068-710a-4b58-be40-117338c13436 {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 527.227232] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b1c5c3-646c-4038-a5df-1c0dff64532b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.236112] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2a2319-e809-410a-b407-8514a0476389 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.272509] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0c29bc-778c-4de9-99b8-ec6b4273d931 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.281237] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5707444-3d9e-487e-8715-bc5283da2983 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.298140] env[63489]: DEBUG nova.compute.provider_tree [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 527.363091] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.590377] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050189, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.069964} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.590708] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copied Virtual Disk [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk to [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 527.590961] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleting the datastore file [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c/tmp-sparse.vmdk {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 527.591561] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1794f978-2992-4d8b-a081-599768c99bb7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.600178] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 527.600178] env[63489]: value = "task-1050191" [ 527.600178] env[63489]: _type = "Task" [ 527.600178] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.610574] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050191, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.825518] env[63489]: ERROR nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [req-6f8ed4e9-dd5b-4fc4-b0af-fd217cc82f12] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6f8ed4e9-dd5b-4fc4-b0af-fd217cc82f12"}]} [ 527.847494] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 527.871058] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 527.871058] env[63489]: DEBUG nova.compute.provider_tree [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 527.871058] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Releasing lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.873325] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 527.873646] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 527.874263] env[63489]: DEBUG oslo_concurrency.lockutils [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] Acquired lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.874446] env[63489]: DEBUG nova.network.neutron [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Refreshing network info cache for port 863ad12f-85ef-4ee7-a683-03416fb6d240 {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 527.880471] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77794309-2ed8-43db-ae2d-6f566c8e82ea {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.892820] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 527.898553] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3708af4-4528-42ca-879e-64f255fe1e3b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.912307] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 527.933044] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 44c8d268-4c23-4b85-915d-3c708586046e could not be found. [ 527.933044] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 527.933044] env[63489]: INFO nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Took 0.06 seconds to destroy the instance on the hypervisor. 
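The failure sequence above (PortBindingFailed raised from _allocate_network_async, the build terminated, InstanceNotFound on the backend treated as an already-destroyed VM, then network deallocation) is driven by the error-handling pattern visible in the traceback: oslo_utils.excutils.save_and_reraise_exception() lets cleanup code run while the original exception is preserved and re-raised. A minimal sketch of that pattern follows; PortBindingFailed, bind_port and cleanup_port here are simplified stand-ins for illustration, not Nova's actual code.

    # Sketch of the save_and_reraise_exception pattern seen in the traceback above.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Simplified stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id


    def update_ports_for_instance(ports, bind_port, cleanup_port):
        created = []
        for port in ports:
            try:
                bind_port(port)            # may raise PortBindingFailed
                created.append(port)
            except PortBindingFailed:
                with excutils.save_and_reraise_exception():
                    # Roll back ports that were already created; the original
                    # exception is re-raised when the context manager exits.
                    for done in created:
                        cleanup_port(done)
        return created

Because the original exception is re-raised, the same port ID (863ad12f-85ef-4ee7-a683-03416fb6d240) surfaces again in the spawn-failure traceback logged by _build_and_run_instance above.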
[ 527.933044] env[63489]: DEBUG oslo.service.loopingcall [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 527.933044] env[63489]: DEBUG nova.compute.manager [-] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 527.933044] env[63489]: DEBUG nova.network.neutron [-] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 527.945629] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 527.950109] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.950359] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.950509] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.950778] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.950863] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 
tempest-SecurityGroupsTestJSON-1477286778-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.950966] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.951178] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.951326] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 527.951480] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.951632] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.951793] env[63489]: DEBUG nova.virt.hardware [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.953084] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194d62ee-42ba-4e49-ab1e-a6ac28167a9f {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.965943] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a66f46-cfb8-4a47-9e85-08c81377e3ba {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.012934] env[63489]: DEBUG nova.network.neutron [-] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.115124] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.052372} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.115124] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleted the datastore file {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 528.115124] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Moving file from [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a/314454d0-cea8-4ac2-8a2e-d19d8731016c to [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c. {{(pid=63489) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 528.115752] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-041d467d-f848-4543-b370-7d9ceffaaed7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.125529] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 528.125529] env[63489]: value = "task-1050192" [ 528.125529] env[63489]: _type = "Task" [ 528.125529] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.134298] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050192, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.166523] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a380ea-50a5-4186-80f6-8dab9c06288d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.175403] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db019e17-2e26-4599-b9e0-35e06716d699 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.208008] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Successfully created port: addaad04-c9d7-4994-842b-b3501824f997 {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.211159] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee53056f-6cf5-4405-8d03-6d4ad0c09e23 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.219874] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85062035-68f0-4034-9b7f-a11cab3bf529 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.234247] env[63489]: DEBUG nova.compute.provider_tree [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 528.450852] env[63489]: DEBUG nova.network.neutron [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.515321] env[63489]: DEBUG nova.network.neutron [-] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.639335] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050192, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025616} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.639657] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] File moved {{(pid=63489) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 528.639854] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Cleaning up location [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 528.641344] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleting the datastore file [datastore2] vmware_temp/a47a1bef-0b2d-41de-bc7d-3837369f5b9a {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 528.641344] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cede01e-051a-4a7b-a123-e4b2e92d9913 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.648291] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 528.648291] env[63489]: value = "task-1050194" [ 528.648291] env[63489]: _type = "Task" [ 528.648291] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.664184] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.789402] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updated inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with generation 15 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 528.789402] env[63489]: DEBUG nova.compute.provider_tree [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde generation from 15 to 16 during operation: update_inventory {{(pid=63489) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 528.789402] env[63489]: DEBUG nova.compute.provider_tree [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 528.865898] env[63489]: DEBUG nova.network.neutron [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.018838] env[63489]: INFO nova.compute.manager [-] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Took 1.09 seconds to deallocate network for instance. 
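The 409 from Placement above ("placement.concurrent_update", resource provider generation conflict) followed by the refresh to generation 15 and the update to 16 shows the generation-based optimistic concurrency the report client relies on: each inventory PUT carries the provider generation last seen, a conflict means another writer updated the provider in the meantime, and the caller re-reads and retries. A rough sketch of that retry loop against the Placement REST API follows; the endpoint URL, token and microversion header are placeholders, and the real implementation lives in nova.scheduler.client.report.

    # Sketch of retry-on-generation-conflict against
    # PUT /resource_providers/{uuid}/inventories (Placement API).
    import requests

    PLACEMENT = "http://placement.example.test"          # placeholder endpoint
    HEADERS = {"X-Auth-Token": "<token>",                 # placeholder auth
               "OpenStack-API-Version": "placement 1.26"}


    def set_inventory(rp_uuid, inventories, retries=3):
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(retries):
            # The provider generation acts as an optimistic lock.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation":
                    current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation between our GET and PUT, so refresh and try again.
        raise RuntimeError("gave up after repeated generation conflicts")

Retrying with a freshly read generation is what turns the transient 409 into the successful "Updated inventory for provider ... with generation 15" record above.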
[ 529.024940] env[63489]: DEBUG nova.compute.claims [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.025150] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.162879] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025416} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.163158] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleted the datastore file {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 529.164166] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0f9b112-74d0-4d54-a2e9-7304a5bfb3ac {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.169751] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 529.169751] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]5272ec74-2d59-0046-dde7-9ad427ad6dff" [ 529.169751] env[63489]: _type = "Task" [ 529.169751] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.177596] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]5272ec74-2d59-0046-dde7-9ad427ad6dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.301430] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.432s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.302374] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 529.305585] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.252s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.307300] env[63489]: INFO nova.compute.claims [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.370636] env[63489]: DEBUG oslo_concurrency.lockutils [req-3675f506-19ed-40d0-8a39-2382a09a1cbb req-4247ae63-b1eb-4ef2-89bb-a75d275b8245 service nova] Releasing lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.681118] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]5272ec74-2d59-0046-dde7-9ad427ad6dff, 'name': SearchDatastore_Task, 'duration_secs': 0.009275} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.681476] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.681824] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 529.681966] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a866173-91d6-436a-a891-2b173fffb560 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.688773] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 529.688773] env[63489]: value = "task-1050195" [ 529.688773] env[63489]: _type = "Task" [ 529.688773] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.697888] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.812644] env[63489]: DEBUG nova.compute.utils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.818959] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 529.818959] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 530.035152] env[63489]: DEBUG nova.policy [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bcc832588b54eafbcc2aa97809cb202', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0291e7b8556646e0b5e4658b3e1659eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 530.200047] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504964} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.200283] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 530.200592] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extending root virtual disk to 1048576 {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 530.200746] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-befa3e98-fbbb-4607-b867-0562305a3958 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.210964] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 530.210964] env[63489]: value = "task-1050196" [ 530.210964] env[63489]: _type = "Task" [ 530.210964] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.219561] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050196, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.323103] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Start building block device mappings for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 530.593100] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6250d0f3-ac91-475c-9c0f-b44bd136bc09 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.604217] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e15452-2834-4377-ad87-ef26b9321d17 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.655383] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99992c17-3491-49f3-a7f3-f52b8dbb4b8b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.662715] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8bb5b2-a834-4e1b-a989-ad84e61d38e6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.684331] env[63489]: DEBUG nova.compute.provider_tree [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.726244] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050196, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068543} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.726558] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extended root virtual disk {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 530.727474] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a8a989-59cc-40c7-af7c-7a071fb90d03 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.758542] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 530.758542] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-214a6b64-80be-4109-b2d5-c72b078ddd86 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.778740] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 530.778740] env[63489]: value = "task-1050197" [ 530.778740] env[63489]: _type = "Task" [ 530.778740] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.787583] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050197, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.874887] env[63489]: DEBUG nova.compute.manager [req-942dcc0b-abe2-4855-b67a-bb04af3fa5a0 req-cecc471c-49a9-4e53-a12b-00d5202094a4 service nova] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Received event network-vif-deleted-863ad12f-85ef-4ee7-a683-03416fb6d240 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 530.959986] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Acquiring lock "1c440617-c0ec-485b-a2cc-cd0c8a9d60df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.960390] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Lock "1c440617-c0ec-485b-a2cc-cd0c8a9d60df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.186453] env[63489]: DEBUG nova.scheduler.client.report [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.212304] env[63489]: DEBUG oslo_concurrency.lockutils [None req-71fac178-826e-4401-897d-73c7435935d1 tempest-VolumesAssistedSnapshotsTest-130347173 tempest-VolumesAssistedSnapshotsTest-130347173-project-member] Acquiring lock "27c8104f-a58f-4416-8a3b-d9be8ca6533b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.212470] env[63489]: DEBUG oslo_concurrency.lockutils [None req-71fac178-826e-4401-897d-73c7435935d1 tempest-VolumesAssistedSnapshotsTest-130347173 tempest-VolumesAssistedSnapshotsTest-130347173-project-member] Lock "27c8104f-a58f-4416-8a3b-d9be8ca6533b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.289020] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050197, 'name': ReconfigVM_Task, 'duration_secs': 0.288065} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.289344] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 531.290038] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebef3c8d-b504-4846-b173-052b63f19c0e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.298624] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 531.298624] env[63489]: value = "task-1050199" [ 531.298624] env[63489]: _type = "Task" [ 531.298624] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.306413] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050199, 'name': Rename_Task} progress is 5%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.339891] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Start spawning the instance on the hypervisor. 
{{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 531.365736] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.365999] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.366192] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.366987] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.366987] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.366987] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.366987] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.366987] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.368155] env[63489]: DEBUG 
nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.373014] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.373014] env[63489]: DEBUG nova.virt.hardware [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.373014] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9707a676-4864-4f37-8834-32a9a5aafdd8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.381484] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755d8cfe-a475-4b55-8a71-071e50d464ac {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.506742] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Successfully created port: c4d0be6e-bb71-426b-ab27-bfe833d526cb {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.694675] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.695260] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.699851] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.814s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.699851] env[63489]: INFO nova.compute.claims [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 531.811991] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050199, 'name': Rename_Task, 'duration_secs': 0.165714} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.812260] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 531.812507] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9985aa5-3b0a-4c07-9d5c-bd9fd0e13024 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.819056] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 531.819056] env[63489]: value = "task-1050200" [ 531.819056] env[63489]: _type = "Task" [ 531.819056] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.827287] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.204490] env[63489]: DEBUG nova.compute.utils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.205937] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Allocating IP information in the background. 
{{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 532.206112] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 532.263547] env[63489]: DEBUG nova.compute.manager [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Received event network-changed-7476d068-710a-4b58-be40-117338c13436 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 532.263910] env[63489]: DEBUG nova.compute.manager [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Refreshing instance network info cache due to event network-changed-7476d068-710a-4b58-be40-117338c13436. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 532.264562] env[63489]: DEBUG oslo_concurrency.lockutils [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] Acquiring lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.264868] env[63489]: DEBUG oslo_concurrency.lockutils [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] Acquired lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.265186] env[63489]: DEBUG nova.network.neutron [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Refreshing network info cache for port 7476d068-710a-4b58-be40-117338c13436 {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 532.330025] env[63489]: DEBUG oslo_vmware.api [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050200, 'name': PowerOnVM_Task, 'duration_secs': 0.466598} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.330311] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 532.330506] env[63489]: INFO nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Took 9.12 seconds to spawn the instance on the hypervisor. 
[ 532.330748] env[63489]: DEBUG nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Checking state {{(pid=63489) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 532.332791] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0371580e-fc5c-4ef0-bccc-b40c04416764 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.411576] env[63489]: DEBUG nova.policy [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e732412ee2a3411e84329da337f57c48', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d152b397e804552a49081730d70ed87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 532.720331] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.831196] env[63489]: DEBUG nova.network.neutron [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 532.857588] env[63489]: INFO nova.compute.manager [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Took 18.29 seconds to build instance. [ 532.980685] env[63489]: ERROR nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. 
[ 532.980685] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.980685] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.980685] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.980685] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.980685] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.980685] env[63489]: ERROR nova.compute.manager raise self.value [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.980685] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.980685] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.980685] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.981260] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.981260] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.981260] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. 
[ 532.981260] env[63489]: ERROR nova.compute.manager [ 532.981260] env[63489]: Traceback (most recent call last): [ 532.981260] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.981260] env[63489]: listener.cb(fileno) [ 532.981260] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.981260] env[63489]: result = function(*args, **kwargs) [ 532.981260] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.981260] env[63489]: return func(*args, **kwargs) [ 532.981260] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.981260] env[63489]: raise e [ 532.981260] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.981260] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 532.981260] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.981260] env[63489]: created_port_ids = self._update_ports_for_instance( [ 532.981260] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.981260] env[63489]: with excutils.save_and_reraise_exception(): [ 532.981260] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.981260] env[63489]: self.force_reraise() [ 532.981260] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.981260] env[63489]: raise self.value [ 532.981260] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.981260] env[63489]: updated_port = self._update_port( [ 532.981260] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.981260] env[63489]: _ensure_no_port_binding_failure(port) [ 532.981260] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.981260] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.982068] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. [ 532.982068] env[63489]: Removing descriptor: 16 [ 532.982068] env[63489]: ERROR nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. 
[ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Traceback (most recent call last): [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] yield resources [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.driver.spawn(context, instance, image_meta, [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.982068] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] vm_ref = self.build_virtual_machine(instance, [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] for vif in network_info: [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self._sync_wrapper(fn, *args, **kwargs) [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.wait() [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self[:] = self._gt.wait() [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self._exit_event.wait() [ 532.982395] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.982729] env[63489]: ERROR 
nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] result = hub.switch() [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self.greenlet.switch() [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] result = function(*args, **kwargs) [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return func(*args, **kwargs) [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise e [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] nwinfo = self.network_api.allocate_for_instance( [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 532.982729] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] created_port_ids = self._update_ports_for_instance( [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] with excutils.save_and_reraise_exception(): [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.force_reraise() [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise self.value [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] updated_port = self._update_port( [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.983098] 
env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] _ensure_no_port_binding_failure(port) [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.983098] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise exception.PortBindingFailed(port_id=port['id']) [ 532.983424] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. [ 532.983424] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] [ 532.983424] env[63489]: INFO nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Terminating instance [ 532.990163] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.028580] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6940a60-068c-4f3b-8f8a-58bf5f5d57f9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.033636] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4820452-e6d5-49c9-9a8f-5c7d42a3cce2 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.081381] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cde59f-e2bf-42b8-bf84-c05434323a8d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.089286] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c442e6-a8ad-43a9-bb28-d4fc49193207 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.104138] env[63489]: DEBUG nova.compute.provider_tree [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.117623] env[63489]: DEBUG nova.network.neutron [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.360544] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4c43c3bb-c1a7-435b-bcbd-8a418dbf78e1 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "013f2d36-9578-45d2-aff0-170b5fd97506" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.802s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.608324] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Successfully created port: 1f64771d-16d2-47ac-a3ff-99a3863e1857 {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.615264] env[63489]: DEBUG nova.scheduler.client.report [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.622275] env[63489]: DEBUG oslo_concurrency.lockutils [req-cf240051-2136-484b-9b20-94e845cd5654 req-f591ba4c-82a5-46c6-b796-286701e3adc9 service nova] Releasing lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.623565] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquired lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.624423] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.737796] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 533.764616] env[63489]: ERROR nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. 
[ 533.764616] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.764616] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.764616] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.764616] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.764616] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.764616] env[63489]: ERROR nova.compute.manager raise self.value [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.764616] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 533.764616] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.764616] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 533.765087] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.765087] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 533.765087] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. 
[ 533.765087] env[63489]: ERROR nova.compute.manager [ 533.765087] env[63489]: Traceback (most recent call last): [ 533.765087] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 533.765087] env[63489]: listener.cb(fileno) [ 533.765087] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.765087] env[63489]: result = function(*args, **kwargs) [ 533.765087] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.765087] env[63489]: return func(*args, **kwargs) [ 533.765087] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.765087] env[63489]: raise e [ 533.765087] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.765087] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 533.765087] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.765087] env[63489]: created_port_ids = self._update_ports_for_instance( [ 533.765087] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.765087] env[63489]: with excutils.save_and_reraise_exception(): [ 533.765087] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.765087] env[63489]: self.force_reraise() [ 533.765087] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.765087] env[63489]: raise self.value [ 533.765087] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.765087] env[63489]: updated_port = self._update_port( [ 533.765087] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.765087] env[63489]: _ensure_no_port_binding_failure(port) [ 533.765087] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.765087] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 533.765841] env[63489]: nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. [ 533.765841] env[63489]: Removing descriptor: 15 [ 533.765841] env[63489]: ERROR nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. 
[ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Traceback (most recent call last): [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] yield resources [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.driver.spawn(context, instance, image_meta, [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.765841] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] vm_ref = self.build_virtual_machine(instance, [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] for vif in network_info: [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self._sync_wrapper(fn, *args, **kwargs) [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.wait() [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self[:] = self._gt.wait() [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self._exit_event.wait() [ 533.766978] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.767351] env[63489]: ERROR 
nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] result = hub.switch() [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self.greenlet.switch() [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] result = function(*args, **kwargs) [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return func(*args, **kwargs) [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise e [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] nwinfo = self.network_api.allocate_for_instance( [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.767351] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] created_port_ids = self._update_ports_for_instance( [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] with excutils.save_and_reraise_exception(): [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.force_reraise() [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise self.value [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] updated_port = self._update_port( [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.767732] 
env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] _ensure_no_port_binding_failure(port) [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.767732] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise exception.PortBindingFailed(port_id=port['id']) [ 533.768068] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. [ 533.768068] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] [ 533.768068] env[63489]: INFO nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Terminating instance [ 533.770546] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.770546] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquired lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.770546] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.774524] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.774728] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Flavor limits 0:0:0 {{(pid=63489) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.774878] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.775062] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.775650] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.778924] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.779019] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.779639] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 533.779639] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.779639] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.779796] env[63489]: DEBUG nova.virt.hardware [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.780585] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466a46df-befe-4fb4-b039-46eefff98ad2 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.791932] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58537604-dfcd-4731-98a3-310d955e12ce {{(pid=63489) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.863058] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 534.127194] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.127194] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Start building networks asynchronously for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 534.128278] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.634s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.195984] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.307169] env[63489]: DEBUG oslo_concurrency.lockutils [None req-111ccdb5-1ea1-4c84-ae3c-3e22109cca28 tempest-AttachInterfacesTestJSON-1102712621 tempest-AttachInterfacesTestJSON-1102712621-project-member] Acquiring lock "861a32b2-1a63-4fc5-9151-73993788e0f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.307472] env[63489]: DEBUG oslo_concurrency.lockutils [None req-111ccdb5-1ea1-4c84-ae3c-3e22109cca28 tempest-AttachInterfacesTestJSON-1102712621 tempest-AttachInterfacesTestJSON-1102712621-project-member] Lock "861a32b2-1a63-4fc5-9151-73993788e0f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.369639] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.396349] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.443535] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.636388] env[63489]: DEBUG nova.compute.utils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.645825] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 534.645825] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.716890] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.894661] env[63489]: DEBUG nova.policy [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad5a58b5db63420e99d354c12251f1ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef36f855dfee49258bb35f32bf7bf69e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 534.946866] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Releasing lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.947329] env[63489]: DEBUG nova.compute.manager [None 
req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Start destroying the instance on the hypervisor. {{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 534.947561] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 534.947876] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e891fad7-1dd6-4fec-8c5d-22cfeca210bc {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.960410] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22212ad-40dc-4e3a-8002-dd39e750ab61 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.988400] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f could not be found. [ 534.988400] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 534.988400] env[63489]: INFO nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 534.988822] env[63489]: DEBUG oslo.service.loopingcall [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.988822] env[63489]: DEBUG nova.compute.manager [-] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 534.988822] env[63489]: DEBUG nova.network.neutron [-] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 535.040943] env[63489]: DEBUG nova.network.neutron [-] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.053035] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f035f09-0afd-4106-832e-280d59cedc76 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.067455] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c31881-c82c-4c8f-ba3a-07480f7d67cc {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.110132] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3cd516-9bd5-4bf2-b4ea-1b3479422094 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.121337] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a946e82-a200-4ea3-a1e4-46f0c6fedfe6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.139472] env[63489]: DEBUG nova.compute.provider_tree [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.150681] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 535.221095] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Releasing lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.221492] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 535.221690] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 535.222363] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e74beeb-b07b-4949-bb38-d8dd9b5c75e8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.234536] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef7ebc7-a24d-4157-a394-a995372f6cbc {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.264828] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5373362e-671e-44f4-8c0b-8396a3925c12 could not be found. [ 535.265161] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 535.265391] env[63489]: INFO nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Took 0.04 seconds to destroy the instance on the hypervisor. [ 535.265698] env[63489]: DEBUG oslo.service.loopingcall [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.265975] env[63489]: DEBUG nova.compute.manager [-] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.266514] env[63489]: DEBUG nova.network.neutron [-] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 535.325042] env[63489]: DEBUG nova.network.neutron [-] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.548979] env[63489]: DEBUG nova.network.neutron [-] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.642571] env[63489]: DEBUG nova.scheduler.client.report [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.826386] env[63489]: DEBUG nova.network.neutron [-] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.056027] env[63489]: INFO nova.compute.manager [-] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Took 1.07 seconds to deallocate network for instance. [ 536.058461] env[63489]: DEBUG nova.compute.claims [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 536.058556] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.122481] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Successfully created port: 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 536.157233] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.026s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.157233] env[63489]: ERROR nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more 
information. [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Traceback (most recent call last): [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.driver.spawn(context, instance, image_meta, [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.157233] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] vm_ref = self.build_virtual_machine(instance, [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] for vif in network_info: [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self._sync_wrapper(fn, *args, **kwargs) [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.wait() [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self[:] = self._gt.wait() [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self._exit_event.wait() [ 536.157504] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] result = hub.switch() [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return self.greenlet.switch() [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] result = function(*args, **kwargs) [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] return func(*args, **kwargs) [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise e [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] nwinfo = self.network_api.allocate_for_instance( [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.157924] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] created_port_ids = self._update_ports_for_instance( [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] with excutils.save_and_reraise_exception(): [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] self.force_reraise() [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise self.value [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] updated_port = self._update_port( [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] _ensure_no_port_binding_failure(port) [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 536.158298] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] raise exception.PortBindingFailed(port_id=port['id']) [ 536.158889] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] nova.exception.PortBindingFailed: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. [ 536.158889] env[63489]: ERROR nova.compute.manager [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] [ 536.158889] env[63489]: DEBUG nova.compute.utils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 536.162752] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.051s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.162752] env[63489]: INFO nova.compute.claims [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.170131] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 536.176146] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Build of instance a577fdfb-711e-4f8d-b111-80e093374fc4 was re-scheduled: Binding failed for port 53fc1a40-c521-4345-98f2-e863cab2017d, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 536.176804] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 536.177174] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquiring lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.177784] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Acquired lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.179021] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.199873] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 536.200930] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 536.200930] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 536.200930] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Flavor pref 0:0:0 
{{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 536.200930] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 536.200930] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 536.201380] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 536.201380] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 536.201380] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 536.201484] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 536.201625] env[63489]: DEBUG nova.virt.hardware [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 536.202846] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d1f670-489e-4b84-b6d4-97cb8eb3be45 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.213917] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5937922c-c543-4f3f-8814-7a931c7fd208 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.328928] env[63489]: INFO nova.compute.manager [-] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Took 1.06 seconds to deallocate network for instance. 
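The nova.virt.hardware records above (Flavor limits 0:0:0, Chose sockets=0, cores=0, threads=0 with limits of 65536 each, Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]) describe how the guest CPU topology is derived for the 1-vCPU m1.nano flavor: with no flavor or image constraints, every factorization of the vCPU count into sockets x cores x threads is a candidate, and for one vCPU that collapses to 1:1:1. Below is a small stand-alone sketch of that enumeration, assuming the "unset means 65536" cap seen in the log; it is an approximation, not Nova's exact _get_possible_cpu_topologies implementation.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) split whose product equals
        # the vCPU count and that stays within the (effectively unlimited) caps.
        result = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        result.append((sockets, cores, threads))
        return result

    print(possible_topologies(1))   # [(1, 1, 1)]  -- matches "Got 1 possible topologies"
    print(possible_topologies(4))   # several candidates, e.g. (1, 1, 4), (2, 2, 1), (4, 1, 1)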
[ 536.332854] env[63489]: DEBUG nova.compute.claims [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 536.332854] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.469955] env[63489]: ERROR nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. [ 536.469955] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.469955] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.469955] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.469955] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.469955] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.469955] env[63489]: ERROR nova.compute.manager raise self.value [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.469955] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.469955] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.469955] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.470495] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.470495] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.470495] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. 
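Each of these failure tracebacks passes through oslo_utils.excutils.save_and_reraise_exception(), which is why excutils.py:__exit__ and force_reraise() appear in every one of them: Nova runs its port cleanup inside that context manager and the original exception is re-raised when the block exits. Here is a self-contained usage sketch of the pattern, assuming oslo.utils is installed; the helpers and the port ID are illustrative stand-ins, not Nova code.

    from oslo_utils import excutils

    def _update_port(port_id):
        # Hypothetical stand-in for the Neutron update that fails in the log.
        raise RuntimeError("binding failed for %s" % port_id)

    def _rollback(created):
        print("rolling back ports:", created)

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(_update_port(port_id))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; on leaving the block the saved exception
                    # is re-raised (force_reraise), unless .reraise is set False.
                    _rollback(created)
        return created

    try:
        update_ports_for_instance(["c4d0be6e-bb71-426b-ab27-bfe833d526cb"])
    except RuntimeError as exc:
        print("re-raised:", exc)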
[ 536.470495] env[63489]: ERROR nova.compute.manager [ 536.470495] env[63489]: Traceback (most recent call last): [ 536.470495] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.470495] env[63489]: listener.cb(fileno) [ 536.470495] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.470495] env[63489]: result = function(*args, **kwargs) [ 536.470495] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.470495] env[63489]: return func(*args, **kwargs) [ 536.470495] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.470495] env[63489]: raise e [ 536.470495] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.470495] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 536.470495] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.470495] env[63489]: created_port_ids = self._update_ports_for_instance( [ 536.470495] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.470495] env[63489]: with excutils.save_and_reraise_exception(): [ 536.470495] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.470495] env[63489]: self.force_reraise() [ 536.470495] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.470495] env[63489]: raise self.value [ 536.470495] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.470495] env[63489]: updated_port = self._update_port( [ 536.470495] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.470495] env[63489]: _ensure_no_port_binding_failure(port) [ 536.470495] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.470495] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.471304] env[63489]: nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. [ 536.471304] env[63489]: Removing descriptor: 17 [ 536.471304] env[63489]: ERROR nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. 
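Note that every port-binding failure produces two tracebacks: one whose outermost frames are the eventlet hub, from the greenthread that ran _allocate_network_async (directly above), and one from the spawn path once driver.spawn iterates network_info and the async wrapper's wait() re-raises the stored exception (the per-instance traceback that follows). A tiny illustration of that deferral with eventlet; allocate_network and the RuntimeError are stand-ins, not the Nova functions.

    import eventlet

    def allocate_network():
        # Stand-in for _allocate_network_async: the failure happens in this
        # separate greenthread, not in the caller that spawned it.
        raise RuntimeError("binding failed")

    gt = eventlet.spawn(allocate_network)

    # The caller carries on preparing the VM; only when it finally needs the
    # result (driver.spawn -> get_vif_info iterating network_info) does wait()
    # propagate the stored exception, yielding the second traceback.
    try:
        gt.wait()
    except RuntimeError as exc:
        print("surfaced at wait():", exc)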
[ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Traceback (most recent call last): [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] yield resources [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.driver.spawn(context, instance, image_meta, [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.471304] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] vm_ref = self.build_virtual_machine(instance, [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] for vif in network_info: [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self._sync_wrapper(fn, *args, **kwargs) [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.wait() [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self[:] = self._gt.wait() [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self._exit_event.wait() [ 536.471624] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.472436] env[63489]: ERROR 
nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] result = hub.switch() [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self.greenlet.switch() [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] result = function(*args, **kwargs) [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return func(*args, **kwargs) [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise e [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] nwinfo = self.network_api.allocate_for_instance( [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.472436] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] created_port_ids = self._update_ports_for_instance( [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] with excutils.save_and_reraise_exception(): [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.force_reraise() [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise self.value [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] updated_port = self._update_port( [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.472879] 
env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] _ensure_no_port_binding_failure(port) [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.472879] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise exception.PortBindingFailed(port_id=port['id']) [ 536.473227] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. [ 536.473227] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] [ 536.473227] env[63489]: INFO nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Terminating instance [ 536.474959] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquiring lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.475223] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquired lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.477437] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.523041] env[63489]: DEBUG nova.compute.manager [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Received event network-changed-addaad04-c9d7-4994-842b-b3501824f997 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.523346] env[63489]: DEBUG nova.compute.manager [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Refreshing instance network info cache due to event network-changed-addaad04-c9d7-4994-842b-b3501824f997. 
{{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 536.523346] env[63489]: DEBUG oslo_concurrency.lockutils [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] Acquiring lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.523549] env[63489]: DEBUG oslo_concurrency.lockutils [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] Acquired lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.523648] env[63489]: DEBUG nova.network.neutron [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Refreshing network info cache for port addaad04-c9d7-4994-842b-b3501824f997 {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 536.724155] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.925489] env[63489]: DEBUG nova.compute.manager [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Received event network-changed-c4d0be6e-bb71-426b-ab27-bfe833d526cb {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.925703] env[63489]: DEBUG nova.compute.manager [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Refreshing instance network info cache due to event network-changed-c4d0be6e-bb71-426b-ab27-bfe833d526cb. 
{{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 536.925900] env[63489]: DEBUG oslo_concurrency.lockutils [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] Acquiring lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.959920] env[63489]: INFO nova.compute.manager [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Rebuilding instance [ 537.013775] env[63489]: DEBUG nova.compute.manager [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Checking state {{(pid=63489) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 537.014581] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21700d72-699a-4861-9dc1-537fa5b1f1a5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.025598] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.056157] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.083893] env[63489]: DEBUG nova.network.neutron [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.416195] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.441867] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3779eaf1-d999-448d-be91-9e106d443cb9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.452127] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bd5e6e-a27a-4e85-b347-817c79c4fafa {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.492144] env[63489]: DEBUG nova.network.neutron [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.496040] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb74e86e-9d41-4056-8f0f-279f3e7cb280 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.503229] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3be01a-1f54-4f48-a6f2-9bba554a9f1b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.521454] env[63489]: DEBUG nova.compute.provider_tree [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.533604] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 537.533855] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a92d003-5883-485d-a039-fec64647cf21 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.540672] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 537.540672] env[63489]: value = "task-1050204" [ 537.540672] env[63489]: _type = "Task" [ 537.540672] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.549694] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.559056] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Releasing lock "refresh_cache-a577fdfb-711e-4f8d-b111-80e093374fc4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.559216] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 537.559418] env[63489]: DEBUG nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.559590] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.605729] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.919956] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Releasing lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.919956] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 537.923167] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 537.923167] env[63489]: DEBUG oslo_concurrency.lockutils [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] Acquired lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.923167] env[63489]: DEBUG nova.network.neutron [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Refreshing network info cache for port c4d0be6e-bb71-426b-ab27-bfe833d526cb {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 537.923167] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b991dd2-ab48-4d04-9fa7-babda7f90700 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.939235] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af3d6a2-5c15-4a8e-b490-c25428038a20 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.966950] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e32d6c0-d943-416f-9a54-e3511c933ca9 could not be found. [ 537.966950] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 537.967152] env[63489]: INFO nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 537.967416] env[63489]: DEBUG oslo.service.loopingcall [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 537.967563] env[63489]: DEBUG nova.compute.manager [-] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.967678] env[63489]: DEBUG nova.network.neutron [-] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.997087] env[63489]: DEBUG oslo_concurrency.lockutils [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] Releasing lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.997554] env[63489]: DEBUG nova.compute.manager [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Received event network-vif-deleted-7476d068-710a-4b58-be40-117338c13436 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 537.999011] env[63489]: DEBUG nova.compute.manager [req-3288a469-e74a-453d-ac5d-c0b1493fe420 req-34b26d82-3988-45de-9499-34c5552de624 service nova] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Received event network-vif-deleted-addaad04-c9d7-4994-842b-b3501824f997 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 538.001633] env[63489]: DEBUG nova.network.neutron [-] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.024720] env[63489]: DEBUG nova.scheduler.client.report [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 538.052857] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050204, 'name': PowerOffVM_Task, 'duration_secs': 0.141646} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.053703] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 538.053703] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 538.054206] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6fecbd-6db0-4643-a330-5c51bec04f91 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.066180] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistering the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 538.066443] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-096b1b9f-fdf1-4398-9e38-85e0969bf57b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.095791] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistered the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 538.096058] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleting contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 538.096240] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleting the datastore file [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506 {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 538.096493] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d169e336-6a94-4c9a-a463-75ed57dff427 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.103704] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 538.103704] env[63489]: value = "task-1050206" [ 538.103704] env[63489]: _type = "Task" [ 538.103704] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.112156] env[63489]: DEBUG nova.network.neutron [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.113294] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.437909] env[63489]: ERROR nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. [ 538.437909] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.437909] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.437909] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.437909] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.437909] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.437909] env[63489]: ERROR nova.compute.manager raise self.value [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.437909] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 538.437909] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.437909] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 538.438431] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.438431] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 538.438431] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. 
[ 538.438431] env[63489]: ERROR nova.compute.manager [ 538.438431] env[63489]: Traceback (most recent call last): [ 538.438431] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 538.438431] env[63489]: listener.cb(fileno) [ 538.438431] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.438431] env[63489]: result = function(*args, **kwargs) [ 538.438431] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.438431] env[63489]: return func(*args, **kwargs) [ 538.438431] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 538.438431] env[63489]: raise e [ 538.438431] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.438431] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 538.438431] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.438431] env[63489]: created_port_ids = self._update_ports_for_instance( [ 538.438431] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.438431] env[63489]: with excutils.save_and_reraise_exception(): [ 538.438431] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.438431] env[63489]: self.force_reraise() [ 538.438431] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.438431] env[63489]: raise self.value [ 538.438431] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.438431] env[63489]: updated_port = self._update_port( [ 538.438431] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.438431] env[63489]: _ensure_no_port_binding_failure(port) [ 538.438431] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.438431] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 538.439293] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. [ 538.439293] env[63489]: Removing descriptor: 19 [ 538.439293] env[63489]: ERROR nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. 
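Note: the PortBindingFailed tracebacks above (ports c4d0be6e-bb71-426b-ab27-bfe833d526cb and 1f64771d-16d2-47ac-a3ff-99a3863e1857) all end in nova/network/neutron.py's _ensure_no_port_binding_failure(). The check amounts to: if Neutron reports the port's binding:vif_type as 'binding_failed', abort the spawn by raising PortBindingFailed. A minimal standalone sketch of that behaviour (illustrative only, not Nova's exact code; the port dict mimics the Neutron port API):

# Minimal sketch of the port-binding check seen in the tracebacks above.
# Illustrative only -- not Nova's exact implementation. The port dict keys
# ('id', 'binding:vif_type') follow the Neutron port API.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def ensure_no_port_binding_failure(port):
    """Abort if Neutron could not bind the port to any host."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port left in the 'binding_failed' state by Neutron.
port = {'id': 'c4d0be6e-bb71-426b-ab27-bfe833d526cb',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # same message format as the ERROR records above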
[ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Traceback (most recent call last): [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] yield resources [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.driver.spawn(context, instance, image_meta, [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self._vmops.spawn(context, instance, image_meta, injected_files, [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 538.439293] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] vm_ref = self.build_virtual_machine(instance, [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] vif_infos = vmwarevif.get_vif_info(self._session, [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] for vif in network_info: [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self._sync_wrapper(fn, *args, **kwargs) [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.wait() [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self[:] = self._gt.wait() [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self._exit_event.wait() [ 538.439659] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 538.440025] env[63489]: ERROR 
nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] result = hub.switch() [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self.greenlet.switch() [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] result = function(*args, **kwargs) [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return func(*args, **kwargs) [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise e [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] nwinfo = self.network_api.allocate_for_instance( [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.440025] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] created_port_ids = self._update_ports_for_instance( [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] with excutils.save_and_reraise_exception(): [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.force_reraise() [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise self.value [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] updated_port = self._update_port( [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.440395] 
env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] _ensure_no_port_binding_failure(port) [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.440395] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise exception.PortBindingFailed(port_id=port['id']) [ 538.440720] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. [ 538.440720] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] [ 538.440720] env[63489]: INFO nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Terminating instance [ 538.444068] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquiring lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.444068] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquired lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.444068] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 538.506630] env[63489]: DEBUG nova.network.neutron [-] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.528641] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.529190] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 538.533171] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.334s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.533226] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.533343] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63489) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 538.538018] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.134s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.542612] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0756e7-ac0c-4b13-b5b8-82c4242891af {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.552932] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1487b030-f288-4395-8745-d6f6ebede01e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.573947] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8191bb54-d013-4879-bcfa-070d14e7b6b5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.585226] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68738021-912c-4da6-b50f-dfd08a04ee1c {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.623160] env[63489]: INFO nova.compute.manager [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] [instance: a577fdfb-711e-4f8d-b111-80e093374fc4] Took 1.06 seconds to deallocate network for instance. 
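Note: the 'Lock "compute_resources" acquired ... waited 14.334s' / '"released" ... held 0.000s' records above are emitted by oslo.concurrency's lockutils, which the resource tracker uses to serialise claims against the compute node. A hedged sketch of that pattern (the lockutils calls are the real oslo_concurrency API; the decorated function is illustrative, not the resource tracker's code):

# Hedged sketch of the locking pattern behind the 'Lock "compute_resources"'
# records above. Uses the real oslo_concurrency API; the decorated function
# is illustrative only.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Critical section: one resource-tracker update at a time. lockutils
    # logs the "acquired by ... waited Ns" / "released ... held Ns" lines.
    pass


# Equivalent explicit form using the context manager:
with lockutils.lock('compute_resources'):
    pass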
[ 538.627040] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181615MB free_disk=182GB free_vcpus=48 pci_devices=None {{(pid=63489) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 538.627040] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.635531] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118718} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.635892] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleted the datastore file {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 538.636181] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleted contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 538.636377] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 538.645735] env[63489]: DEBUG nova.network.neutron [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.979914] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.988970] env[63489]: DEBUG nova.network.neutron [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.011247] env[63489]: INFO nova.compute.manager [-] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Took 1.04 seconds to deallocate network for instance. 
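Note: the task records above (task-1050204 PowerOffVM_Task, task-1050206 DeleteDatastoreFile_Task, "Waiting for the task ... to complete", "progress is 0%", "completed successfully") follow oslo.vmware's invoke-then-poll pattern. A hedged sketch, assuming an already-established oslo_vmware.api.VMwareAPISession named session and a VirtualMachine moref vm_ref (not Nova's vm_util code, just the underlying oslo.vmware calls):

# Hedged sketch of the invoke-then-poll task pattern behind the
# PowerOffVM_Task / DeleteDatastoreFile_Task records above. Assumes
# `session` is an existing oslo_vmware.api.VMwareAPISession and `vm_ref`
# a VirtualMachine managed-object reference; error handling omitted.

def power_off_vm(session, vm_ref):
    # Kick off the asynchronous vSphere task; returns a Task moref.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task (the "progress is N%" records above)
    # and returns once vCenter reports success, raising on task error.
    return session.wait_for_task(task)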
[ 539.013609] env[63489]: DEBUG nova.compute.claims [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 539.013796] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.039745] env[63489]: DEBUG nova.compute.utils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.043913] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Allocating IP information in the background. {{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 539.043913] env[63489]: DEBUG nova.network.neutron [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 539.140265] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.233430] env[63489]: DEBUG nova.policy [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fabc70777eb4b33a32de7bf0825f166', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83342d9244ac442c8532338c86c21a4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 539.311902] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6222f444-8072-4674-a6fa-8ba48a195f7d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.320586] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe53391-e185-431e-8f94-5459e89f15b4 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.360631] env[63489]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f171846-05a0-475c-b3ea-f58a48ba0a77 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.369511] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d46ca0-d6bb-4c2d-9a23-648cb7c16de9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.387390] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 539.492781] env[63489]: DEBUG oslo_concurrency.lockutils [req-98ecc4e8-a344-4cf3-8f01-8c44e154bcd7 req-f28024dd-1400-4735-9eef-0e4bbe5ac137 service nova] Releasing lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.545414] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 539.647365] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Releasing lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.647752] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 539.648377] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 539.653197] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63ccd527-fc1c-4b7d-9ff7-c961c52e260d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.665300] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a56fc1-e09e-4e44-8778-8acad8c655c0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.676386] env[63489]: INFO nova.scheduler.client.report [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Deleted allocations for instance a577fdfb-711e-4f8d-b111-80e093374fc4 [ 539.698450] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.698801] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.698984] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.699183] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.699325] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.699467] env[63489]: DEBUG 
nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.699680] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.699835] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.699996] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.700167] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.700332] env[63489]: DEBUG nova.virt.hardware [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.701331] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62da534-8bc3-439b-8f98-0ca52cff9ca3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.707991] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 276c32a2-da0f-420f-a9f2-b13c1fd62586 could not be found. [ 539.708204] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 539.708380] env[63489]: INFO nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Took 0.06 seconds to destroy the instance on the hypervisor. 
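Note: the nova.virt.hardware records above walk the CPU-topology selection for the 1-vCPU m1.nano flavor: limits of 65536 sockets/cores/threads, no flavor or image preference, and a single possible topology of sockets=1, cores=1, threads=1. A rough illustration of the enumeration step, assuming the only constraint is factorising the vCPU count within the logged limits (not Nova's _get_possible_cpu_topologies, which applies further constraints):

# Rough illustration of the "possible topologies" step logged above: every
# (sockets, cores, threads) factorisation of the vCPU count that stays
# within the limits. Not Nova's implementation.
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets, cores in itertools.product(
            range(1, min(max_sockets, vcpus) + 1),
            range(1, min(max_cores, vcpus) + 1)):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))  # [(1, 1, 1)] -- matches the log above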
[ 539.708608] env[63489]: DEBUG oslo.service.loopingcall [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.709230] env[63489]: DEBUG nova.compute.manager [-] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.709357] env[63489]: DEBUG nova.network.neutron [-] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.715522] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3455ddef-e8b1-4f67-8e7d-c2a8533ed73a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.734916] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance VIF info [] {{(pid=63489) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 539.740671] env[63489]: DEBUG oslo.service.loopingcall [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.741520] env[63489]: DEBUG nova.network.neutron [-] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.742949] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Creating VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 539.744267] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80c8ed97-5b35-4205-b93e-2ce46f3fd1fc {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.765462] env[63489]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 539.765462] env[63489]: value = "task-1050208" [ 539.765462] env[63489]: _type = "Task" [ 539.765462] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.774021] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050208, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.914891] env[63489]: ERROR nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [req-0d6e9525-738e-4dad-9391-5ad5da925476] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0d6e9525-738e-4dad-9391-5ad5da925476"}]}: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 539.944697] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 539.958089] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 539.958574] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 539.972205] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 540.003581] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 540.191260] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e65eac10-f688-4ac5-b2e2-439007e37cbe tempest-FloatingIPsAssociationTestJSON-87596711 tempest-FloatingIPsAssociationTestJSON-87596711-project-member] Lock "a577fdfb-711e-4f8d-b111-80e093374fc4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.165s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.241307] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbaefca-3a07-47db-8d58-e6db973b1b8b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.251331] env[63489]: DEBUG nova.network.neutron [-] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.257435] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b487ef70-e35b-4125-814a-07e47862a3bd {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.295894] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3164bd3b-bb20-4d47-b659-1466bd515e39 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.306061] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050208, 'name': CreateVM_Task, 'duration_secs': 0.281367} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.307991] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Created VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 540.308492] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.308656] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.308976] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 540.310314] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dad607-d13e-48a9-8455-bbe3d827854b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.314482] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-970a4f52-4801-47ac-9ac0-960ecfa8adcd {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.331922] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 540.331922] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]5236eab6-9b91-7124-709a-7931604be839" [ 540.331922] env[63489]: _type = "Task" [ 540.331922] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.331922] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 540.342037] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]5236eab6-9b91-7124-709a-7931604be839, 'name': SearchDatastore_Task, 'duration_secs': 0.009308} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.342812] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.342812] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Processing image 314454d0-cea8-4ac2-8a2e-d19d8731016c {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 540.342915] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.346277] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.346777] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 540.348824] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1c7c35f-1ac8-4871-8323-e08cd63083ee {{(pid=63489) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.356267] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 540.356383] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=63489) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 540.358135] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5530fd0e-1195-487b-8bc9-ef8fde21271a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.366086] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 540.366086] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52bf29ca-12c7-9ef1-53d3-4d05418e71e8" [ 540.366086] env[63489]: _type = "Task" [ 540.366086] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.374402] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52bf29ca-12c7-9ef1-53d3-4d05418e71e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.559859] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 540.577454] env[63489]: ERROR nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. 
[ 540.577454] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.577454] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.577454] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.577454] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.577454] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.577454] env[63489]: ERROR nova.compute.manager raise self.value [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.577454] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 540.577454] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.577454] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 540.577990] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.577990] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 540.577990] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. 
[ 540.577990] env[63489]: ERROR nova.compute.manager [ 540.577990] env[63489]: Traceback (most recent call last): [ 540.577990] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 540.577990] env[63489]: listener.cb(fileno) [ 540.577990] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.577990] env[63489]: result = function(*args, **kwargs) [ 540.577990] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.577990] env[63489]: return func(*args, **kwargs) [ 540.577990] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.577990] env[63489]: raise e [ 540.577990] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.577990] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 540.577990] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.577990] env[63489]: created_port_ids = self._update_ports_for_instance( [ 540.577990] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.577990] env[63489]: with excutils.save_and_reraise_exception(): [ 540.577990] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.577990] env[63489]: self.force_reraise() [ 540.577990] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.577990] env[63489]: raise self.value [ 540.577990] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.577990] env[63489]: updated_port = self._update_port( [ 540.577990] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.577990] env[63489]: _ensure_no_port_binding_failure(port) [ 540.577990] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.577990] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 540.578786] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. [ 540.578786] env[63489]: Removing descriptor: 15 [ 540.578786] env[63489]: ERROR nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. 
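Every copy of this traceback, including the per-instance rendering that follows, bottoms out in nova/network/neutron.py line 294, where _ensure_no_port_binding_failure raises PortBindingFailed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac after the port update. As a hedged sketch of that guard (not the literal Nova source; treating 'binding_failed' in binding:vif_type as the failure marker is an assumption based on how Neutron commonly reports an unbindable port), the check amounts to inspecting the updated port dict and failing fast:

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed failure marker on the port

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # Fail fast if Neutron reports that no mechanism driver could bind the port.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

failed_port = {'id': '2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac',
               'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(failed_port)
except PortBindingFailed as exc:
    print(exc)

Raising here is what turns a silent binding problem into the "Instance failed to spawn" error seen below, so the instance is terminated and its claim released instead of being left half-built.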
[ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Traceback (most recent call last): [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] yield resources [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.driver.spawn(context, instance, image_meta, [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.578786] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] vm_ref = self.build_virtual_machine(instance, [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] for vif in network_info: [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self._sync_wrapper(fn, *args, **kwargs) [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.wait() [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self[:] = self._gt.wait() [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self._exit_event.wait() [ 540.579119] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 540.579465] env[63489]: ERROR 
nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] result = hub.switch() [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self.greenlet.switch() [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] result = function(*args, **kwargs) [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return func(*args, **kwargs) [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise e [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] nwinfo = self.network_api.allocate_for_instance( [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.579465] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] created_port_ids = self._update_ports_for_instance( [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] with excutils.save_and_reraise_exception(): [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.force_reraise() [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise self.value [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] updated_port = self._update_port( [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.579832] 
env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] _ensure_no_port_binding_failure(port) [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.579832] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise exception.PortBindingFailed(port_id=port['id']) [ 540.580261] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. [ 540.580261] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] [ 540.580261] env[63489]: INFO nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Terminating instance [ 540.582032] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquiring lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.582032] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquired lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.582032] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 540.590843] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.591088] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Flavor limits 0:0:0 {{(pid=63489) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.591248] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.591425] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.591571] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.591718] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.591928] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.592404] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 540.592613] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.592786] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.592964] env[63489]: DEBUG nova.virt.hardware [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.593805] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650f3e7f-daa7-46a0-92d2-0b0451b48fb3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.604220] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b113160d-1630-446e-8d60-13d0e53844a4 {{(pid=63489) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.693760] env[63489]: DEBUG nova.compute.manager [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 540.753290] env[63489]: DEBUG nova.network.neutron [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Successfully created port: 9f04c3d1-906b-4924-abce-e6761a8c090d {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.755562] env[63489]: INFO nova.compute.manager [-] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Took 1.05 seconds to deallocate network for instance. [ 540.757684] env[63489]: DEBUG nova.compute.claims [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 540.757879] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.859504] env[63489]: ERROR nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [req-dbd23185-c7cc-4ebb-9df0-2f4caa7faeef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dbd23185-c7cc-4ebb-9df0-2f4caa7faeef"}]}: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 540.877480] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52bf29ca-12c7-9ef1-53d3-4d05418e71e8, 'name': SearchDatastore_Task, 'duration_secs': 0.008761} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.878569] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 540.881772] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ec9ac44-99d8-41ce-ba99-d8119235ed77 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.886723] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 540.886723] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52b210fd-c629-eec3-d2aa-3cdaa6c95125" [ 540.886723] env[63489]: _type = "Task" [ 540.886723] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.894490] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52b210fd-c629-eec3-d2aa-3cdaa6c95125, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.897217] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 540.897486] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 540.914590] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing aggregate associations for resource provider 
6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 540.952418] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 541.066481] env[63489]: DEBUG nova.compute.manager [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Received event network-vif-deleted-c4d0be6e-bb71-426b-ab27-bfe833d526cb {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 541.067268] env[63489]: DEBUG nova.compute.manager [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Received event network-changed-1f64771d-16d2-47ac-a3ff-99a3863e1857 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 541.067802] env[63489]: DEBUG nova.compute.manager [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Refreshing instance network info cache due to event network-changed-1f64771d-16d2-47ac-a3ff-99a3863e1857. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 541.068399] env[63489]: DEBUG oslo_concurrency.lockutils [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] Acquiring lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.069018] env[63489]: DEBUG oslo_concurrency.lockutils [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] Acquired lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.069435] env[63489]: DEBUG nova.network.neutron [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Refreshing network info cache for port 1f64771d-16d2-47ac-a3ff-99a3863e1857 {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 541.076072] env[63489]: DEBUG oslo_concurrency.lockutils [None req-df86fdda-cf84-4a3e-9181-3d9bb58a7623 tempest-ServerDiskConfigTestJSON-438976484 tempest-ServerDiskConfigTestJSON-438976484-project-member] Acquiring lock "0a6b42d7-a77c-4047-bf7d-17a31872d955" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.076560] env[63489]: DEBUG oslo_concurrency.lockutils [None req-df86fdda-cf84-4a3e-9181-3d9bb58a7623 tempest-ServerDiskConfigTestJSON-438976484 tempest-ServerDiskConfigTestJSON-438976484-project-member] Lock "0a6b42d7-a77c-4047-bf7d-17a31872d955" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.124323] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.224233] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.228501] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212991ff-ba10-40e4-a9a3-48ac54440352 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.236856] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648d7a38-0ab1-4f65-88ec-b7399b12e05e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.276253] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300d1ca9-b6da-4258-a2bc-d9af0690445b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.284633] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0317704-ebce-4ccf-9f07-0bb13a658185 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.298454] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.400448] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52b210fd-c629-eec3-d2aa-3cdaa6c95125, 'name': SearchDatastore_Task, 'duration_secs': 0.009162} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.400448] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.400448] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 541.400620] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49516ff0-bc9c-41d1-af49-d7c9f7158f9d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.403418] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.410289] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 541.410289] env[63489]: value = "task-1050209" [ 541.410289] env[63489]: _type = "Task" [ 541.410289] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.418879] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.636998] env[63489]: DEBUG nova.network.neutron [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.701783] env[63489]: DEBUG oslo_concurrency.lockutils [None req-8b49f2a4-16a9-48d6-9b2f-854ddd3cfc2e tempest-ServersTestJSON-808274608 tempest-ServersTestJSON-808274608-project-member] Acquiring lock "08d4467f-be74-4306-bbc6-2ee62c5fa136" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.702037] env[63489]: DEBUG oslo_concurrency.lockutils [None req-8b49f2a4-16a9-48d6-9b2f-854ddd3cfc2e tempest-ServersTestJSON-808274608 tempest-ServersTestJSON-808274608-project-member] Lock "08d4467f-be74-4306-bbc6-2ee62c5fa136" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.827389] env[63489]: ERROR nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [req-6890d32d-b6e3-4248-9ff9-1964597fa8af] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6890d32d-b6e3-4248-9ff9-1964597fa8af"}]}: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. 
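The scheduler report client hits the same failure mode three times in this window (req-0d6e9525, req-dbd23185, req-6890d32d): an inventory update for provider 6b569bc3-63ff-4af7-bc85-277940cdadde is rejected with 409 placement.concurrent_update because another writer bumped the resource-provider generation first, after which the client refreshes inventories, aggregates and traits and tries again. A generic sketch of that optimistic-concurrency retry is below; the helper names (get_provider_generation, put_inventory) are hypothetical stand-ins, not the real nova.scheduler.client.report methods:

import time

class GenerationConflict(Exception):
    """The server rejected an update made against a stale provider generation."""

def update_inventory_with_retries(get_provider_generation, put_inventory,
                                  provider_uuid, inventory, max_attempts=4):
    # Recompute the payload against a freshly read generation on every attempt;
    # a conflict means another writer won the race, so refresh and try again.
    for attempt in range(1, max_attempts + 1):
        generation = get_provider_generation(provider_uuid)
        try:
            return put_inventory(provider_uuid, inventory, generation)
        except GenerationConflict:
            if attempt == max_attempts:
                raise
            time.sleep(0.1 * attempt)  # small backoff before re-reading state

The important property is that the write is always recomputed against a freshly read generation, so a stale update can never silently clobber a concurrent allocation change.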
[ 541.831550] env[63489]: DEBUG nova.network.neutron [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.858706] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 541.888998] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 541.889505] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.903132] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 541.905939] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Releasing lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.906383] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 541.906578] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 541.907076] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16985ec5-c6ce-4021-8c7a-f12e62a0c4b4 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.920016] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0d2f0f-97b6-43b1-a0e4-72a9bbe1c132 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.934467] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 541.938279] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050209, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509754} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.939187] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 541.939466] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extending root virtual disk to 1048576 {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 541.940226] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3bcc8d0-9f12-4447-969d-ffe448fd7469 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.951165] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81e42a2c-fb30-42e2-a2a9-45f3184739e4 could not be found. 
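The CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries around this point follow oslo.vmware's task pattern: invoke_api() starts a vCenter task and returns its reference, and wait_for_task() polls it (the wait_for_task / _poll_task lines) until vCenter reports success or error. A minimal sketch of that pattern, assuming reachable vCenter credentials and an already-resolved datacenter reference; the host, credentials and arguments below are placeholders, not values taken from this log:

    from oslo_vmware import api

    # Placeholder connection details (assumption, not this deployment's).
    session = api.VMwareAPISession(
        "vcenter.example", "user", "secret",
        api_retry_count=10, task_poll_interval=0.5)

    def extend_virtual_disk(disk_path, datacenter_ref, new_capacity_kb):
        """Start an ExtendVirtualDisk_Task and block until vCenter finishes it."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, "ExtendVirtualDisk_Task", disk_mgr,
            name=disk_path, datacenter=datacenter_ref,
            newCapacityKb=new_capacity_kb, eagerZero=False)
        # wait_for_task() polls the task object (the _poll_task DEBUG lines)
        # and raises if the task ends in an error state.
        return session.wait_for_task(task)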
[ 541.951403] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 541.951593] env[63489]: INFO nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 541.954059] env[63489]: DEBUG oslo.service.loopingcall [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 541.954059] env[63489]: DEBUG nova.compute.manager [-] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 541.954059] env[63489]: DEBUG nova.network.neutron [-] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 541.956159] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 541.956159] env[63489]: value = "task-1050210" [ 541.956159] env[63489]: _type = "Task" [ 541.956159] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.971944] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.981037] env[63489]: DEBUG nova.network.neutron [-] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.028396] env[63489]: DEBUG nova.compute.manager [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Received event network-changed-2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 542.028396] env[63489]: DEBUG nova.compute.manager [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Refreshing instance network info cache due to event network-changed-2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac. 
{{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 542.028681] env[63489]: DEBUG oslo_concurrency.lockutils [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] Acquiring lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.028681] env[63489]: DEBUG oslo_concurrency.lockutils [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] Acquired lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.028814] env[63489]: DEBUG nova.network.neutron [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Refreshing network info cache for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 542.215373] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f2a029-87f6-4bfb-9b1e-8c703989bb84 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.223680] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc1e7b6-5c68-4485-a223-dec7012798fe {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.255020] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f55982-0a60-4812-a6b8-b384e149a281 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.262386] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3b4bf3-240c-4d42-b467-b80bd27356e9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.278750] env[63489]: DEBUG nova.compute.provider_tree [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.335981] env[63489]: DEBUG oslo_concurrency.lockutils [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] Releasing lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.335981] env[63489]: DEBUG nova.compute.manager [req-23202479-3530-48a8-9036-b92f4c88d968 req-df66806d-a5a6-4755-94f9-01ef8e8ad74b service nova] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Received event network-vif-deleted-1f64771d-16d2-47ac-a3ff-99a3863e1857 {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 542.467608] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073403} completed 
successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.467922] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extended root virtual disk {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 542.470309] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b97aa64-df53-4366-9abd-db6fb5ae4a32 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.489859] env[63489]: DEBUG nova.network.neutron [-] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.506513] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 542.507746] env[63489]: INFO nova.compute.manager [-] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Took 0.55 seconds to deallocate network for instance. [ 542.508659] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3d3093a-d20e-46d4-aee0-90d7ca4f49f7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.527978] env[63489]: DEBUG nova.compute.claims [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 542.527978] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.535493] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 542.535493] env[63489]: value = "task-1050211" [ 542.535493] env[63489]: _type = "Task" [ 542.535493] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.546462] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050211, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.740763] env[63489]: DEBUG nova.network.neutron [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.785016] env[63489]: DEBUG nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 542.905315] env[63489]: DEBUG nova.network.neutron [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.054082] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050211, 'name': ReconfigVM_Task, 'duration_secs': 0.32152} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.054082] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 543.054082] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5daba64-814d-47e0-a8ba-157d8af72543 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.060311] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 543.060311] env[63489]: value = "task-1050212" [ 543.060311] env[63489]: _type = "Task" [ 543.060311] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.071405] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050212, 'name': Rename_Task} progress is 5%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.292015] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 4.758s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.293806] env[63489]: ERROR nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Traceback (most recent call last): [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.driver.spawn(context, instance, image_meta, [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] vm_ref = self.build_virtual_machine(instance, [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.293806] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] for vif in network_info: [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self._sync_wrapper(fn, *args, **kwargs) [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.wait() [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self[:] = self._gt.wait() [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self._exit_event.wait() [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] result = hub.switch() [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.294166] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return self.greenlet.switch() [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] result = function(*args, **kwargs) [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] return func(*args, **kwargs) [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise e [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] nwinfo = self.network_api.allocate_for_instance( [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] created_port_ids = self._update_ports_for_instance( [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] with excutils.save_and_reraise_exception(): [ 543.295543] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] self.force_reraise() [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise self.value [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] updated_port = self._update_port( [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] _ensure_no_port_binding_failure(port) [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] raise exception.PortBindingFailed(port_id=port['id']) [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] nova.exception.PortBindingFailed: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. [ 543.296009] env[63489]: ERROR nova.compute.manager [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] [ 543.296347] env[63489]: DEBUG nova.compute.utils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 543.299087] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.273s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.302781] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Build of instance 9bb676ad-a339-4a05-9a66-3bb817543156 was re-scheduled: Binding failed for port c3ce6b67-3b42-4c01-9783-0ed5859bffd4, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 543.303397] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 543.303776] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquiring lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.304057] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Acquired lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.304328] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 543.409360] env[63489]: DEBUG oslo_concurrency.lockutils [req-fd8e433b-7845-4e65-b32d-b9b902ac69bc req-879141b2-4710-47e0-9a14-9f3856aa4be8 service nova] Releasing lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.570614] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050212, 'name': Rename_Task, 'duration_secs': 0.147865} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.570885] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 543.571146] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c1f6a1b-dab6-4f9d-8b79-61ef998d3f91 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.580269] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 543.580269] env[63489]: value = "task-1050213" [ 543.580269] env[63489]: _type = "Task" [ 543.580269] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.590086] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.867350] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.076367] env[63489]: ERROR nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9f04c3d1-906b-4924-abce-e6761a8c090d, please check neutron logs for more information. [ 544.076367] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.076367] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.076367] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.076367] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.076367] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.076367] env[63489]: ERROR nova.compute.manager raise self.value [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.076367] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 544.076367] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.076367] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 544.076813] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.076813] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 544.076813] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9f04c3d1-906b-4924-abce-e6761a8c090d, please check neutron logs for more information. 
[ 544.076813] env[63489]: ERROR nova.compute.manager [ 544.076813] env[63489]: Traceback (most recent call last): [ 544.076813] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 544.076813] env[63489]: listener.cb(fileno) [ 544.076813] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.076813] env[63489]: result = function(*args, **kwargs) [ 544.076813] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.076813] env[63489]: return func(*args, **kwargs) [ 544.076813] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.076813] env[63489]: raise e [ 544.076813] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.076813] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 544.076813] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.076813] env[63489]: created_port_ids = self._update_ports_for_instance( [ 544.076813] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.076813] env[63489]: with excutils.save_and_reraise_exception(): [ 544.076813] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.076813] env[63489]: self.force_reraise() [ 544.076813] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.076813] env[63489]: raise self.value [ 544.076813] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.076813] env[63489]: updated_port = self._update_port( [ 544.076813] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.076813] env[63489]: _ensure_no_port_binding_failure(port) [ 544.076813] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.076813] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 544.077577] env[63489]: nova.exception.PortBindingFailed: Binding failed for port 9f04c3d1-906b-4924-abce-e6761a8c090d, please check neutron logs for more information. [ 544.077577] env[63489]: Removing descriptor: 16 [ 544.077577] env[63489]: ERROR nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9f04c3d1-906b-4924-abce-e6761a8c090d, please check neutron logs for more information. 
[ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Traceback (most recent call last): [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] yield resources [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] self.driver.spawn(context, instance, image_meta, [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.077577] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] vm_ref = self.build_virtual_machine(instance, [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] for vif in network_info: [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] return self._sync_wrapper(fn, *args, **kwargs) [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] self.wait() [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] self[:] = self._gt.wait() [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] return self._exit_event.wait() [ 544.078023] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.078382] env[63489]: ERROR 
nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] result = hub.switch() [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] return self.greenlet.switch() [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] result = function(*args, **kwargs) [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] return func(*args, **kwargs) [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] raise e [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] nwinfo = self.network_api.allocate_for_instance( [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.078382] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] created_port_ids = self._update_ports_for_instance( [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] with excutils.save_and_reraise_exception(): [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] self.force_reraise() [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] raise self.value [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] updated_port = self._update_port( [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.078872] 
env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] _ensure_no_port_binding_failure(port) [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 544.078872] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] raise exception.PortBindingFailed(port_id=port['id']) [ 544.079214] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] nova.exception.PortBindingFailed: Binding failed for port 9f04c3d1-906b-4924-abce-e6761a8c090d, please check neutron logs for more information. [ 544.079214] env[63489]: ERROR nova.compute.manager [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] [ 544.079214] env[63489]: INFO nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Terminating instance [ 544.089910] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Acquiring lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.089910] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Acquired lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.089910] env[63489]: DEBUG nova.network.neutron [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 544.097852] env[63489]: DEBUG oslo_vmware.api [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050213, 'name': PowerOnVM_Task, 'duration_secs': 0.474765} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.098593] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 544.098813] env[63489]: DEBUG nova.compute.manager [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Checking state {{(pid=63489) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 544.099614] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f28cae-2e80-414c-a487-9f22737fbd5a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.104929] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b4b5c6-41e9-4d69-a1b6-5d32736523a8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.120483] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c972738-367a-4c88-8edc-4ec23da12279 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.156691] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.158371] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6370eeb-10dd-4465-bb3b-3e6c7e1a446b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.169135] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d328d329-1a25-4f28-98d0-92736fe7f5d9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.185643] env[63489]: DEBUG nova.compute.provider_tree [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.638875] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.639910] env[63489]: DEBUG nova.network.neutron [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] 
[instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.664523] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Releasing lock "refresh_cache-9bb676ad-a339-4a05-9a66-3bb817543156" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.664523] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 544.664523] env[63489]: DEBUG nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.664523] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 544.690397] env[63489]: DEBUG nova.scheduler.client.report [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.706644] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.810649] env[63489]: DEBUG nova.network.neutron [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.996688] env[63489]: DEBUG oslo_concurrency.lockutils [None req-9ca69050-8458-4a46-998e-a47b2b6170c0 tempest-ServerActionsV293TestJSON-838824961 tempest-ServerActionsV293TestJSON-838824961-project-member] Acquiring lock "1e26edbe-4f6a-4e14-af55-48888910eb9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.996987] env[63489]: DEBUG oslo_concurrency.lockutils [None req-9ca69050-8458-4a46-998e-a47b2b6170c0 tempest-ServerActionsV293TestJSON-838824961 tempest-ServerActionsV293TestJSON-838824961-project-member] Lock "1e26edbe-4f6a-4e14-af55-48888910eb9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.195248] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.897s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.195847] env[63489]: ERROR nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. 
[ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Traceback (most recent call last): [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.driver.spawn(context, instance, image_meta, [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] vm_ref = self.build_virtual_machine(instance, [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.195847] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] for vif in network_info: [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self._sync_wrapper(fn, *args, **kwargs) [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.wait() [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self[:] = self._gt.wait() [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self._exit_event.wait() [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] result = hub.switch() [ 545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
545.196223] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return self.greenlet.switch() [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] result = function(*args, **kwargs) [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] return func(*args, **kwargs) [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise e [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] nwinfo = self.network_api.allocate_for_instance( [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] created_port_ids = self._update_ports_for_instance( [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] with excutils.save_and_reraise_exception(): [ 545.196614] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] self.force_reraise() [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise self.value [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] updated_port = self._update_port( [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] _ensure_no_port_binding_failure(port) [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] raise exception.PortBindingFailed(port_id=port['id']) [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] nova.exception.PortBindingFailed: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. [ 545.197038] env[63489]: ERROR nova.compute.manager [instance: 44c8d268-4c23-4b85-915d-3c708586046e] [ 545.197369] env[63489]: DEBUG nova.compute.utils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 545.198907] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.803s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.200531] env[63489]: INFO nova.compute.claims [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.206748] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Build of instance 44c8d268-4c23-4b85-915d-3c708586046e was re-scheduled: Binding failed for port 863ad12f-85ef-4ee7-a683-03416fb6d240, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 545.206748] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 545.206748] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquiring lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.206748] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Acquired lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.206949] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.213410] env[63489]: DEBUG nova.network.neutron [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.313878] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Releasing lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.314062] env[63489]: DEBUG nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 545.314367] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.314666] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65cab79e-5c1c-4a02-b44d-7191cddebfa7 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.326923] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921785c7-b55f-4bd4-a112-5909f2786291 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.355043] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd337b87-0c9e-44eb-81bf-572610ac5680 could not be found. [ 545.355236] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.355394] env[63489]: INFO nova.compute.manager [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Took 0.04 seconds to destroy the instance on the hypervisor. [ 545.355732] env[63489]: DEBUG oslo.service.loopingcall [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 545.355863] env[63489]: DEBUG nova.compute.manager [-] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 545.355918] env[63489]: DEBUG nova.network.neutron [-] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 545.523324] env[63489]: DEBUG nova.network.neutron [-] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.715852] env[63489]: INFO nova.compute.manager [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] [instance: 9bb676ad-a339-4a05-9a66-3bb817543156] Took 1.05 seconds to deallocate network for instance. 
[ 545.739456] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.825199] env[63489]: DEBUG nova.compute.manager [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Received event network-vif-deleted-2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 545.825402] env[63489]: DEBUG nova.compute.manager [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Received event network-changed-9f04c3d1-906b-4924-abce-e6761a8c090d {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 545.825561] env[63489]: DEBUG nova.compute.manager [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Refreshing instance network info cache due to event network-changed-9f04c3d1-906b-4924-abce-e6761a8c090d. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 545.825802] env[63489]: DEBUG oslo_concurrency.lockutils [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] Acquiring lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.826526] env[63489]: DEBUG oslo_concurrency.lockutils [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] Acquired lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.826526] env[63489]: DEBUG nova.network.neutron [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Refreshing network info cache for port 9f04c3d1-906b-4924-abce-e6761a8c090d {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 545.999523] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.026787] env[63489]: DEBUG nova.network.neutron [-] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.206967] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4a7217a7-e792-4de2-accd-f27068b98e41 tempest-AttachInterfacesV270Test-564248798 tempest-AttachInterfacesV270Test-564248798-project-member] Acquiring lock "1c9f6ec2-07a8-420b-9fa9-08bc4f94400f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.207222] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4a7217a7-e792-4de2-accd-f27068b98e41 tempest-AttachInterfacesV270Test-564248798 tempest-AttachInterfacesV270Test-564248798-project-member] Lock "1c9f6ec2-07a8-420b-9fa9-08bc4f94400f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.363783] env[63489]: DEBUG nova.network.neutron [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.438378] env[63489]: DEBUG nova.network.neutron [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.503193] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Releasing lock "refresh_cache-44c8d268-4c23-4b85-915d-3c708586046e" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.503193] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 546.503193] env[63489]: DEBUG nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 546.503193] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 546.530167] env[63489]: INFO nova.compute.manager [-] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Took 1.17 seconds to deallocate network for instance. [ 546.531022] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7004a9a-3e5f-4742-b616-bc65db532284 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.537230] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.542667] env[63489]: DEBUG nova.compute.claims [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 546.542667] env[63489]: DEBUG oslo_concurrency.lockutils [None req-955dec36-6afe-4dda-a8a3-10b540b68450 tempest-ServerDiagnosticsTest-2102592906 tempest-ServerDiagnosticsTest-2102592906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.545799] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c695620d-cea9-40ae-9ec9-4fdf34493c97 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.590628] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2791d0-c13e-46d2-8b99-25e621fca8f4 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.599602] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246a995a-3146-4684-a893-92a87a77b1ee {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.615034] env[63489]: DEBUG nova.compute.provider_tree [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.748975] env[63489]: INFO nova.scheduler.client.report [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Deleted allocations for instance 9bb676ad-a339-4a05-9a66-3bb817543156 [ 546.943214] env[63489]: DEBUG oslo_concurrency.lockutils [req-585b71d8-67ec-4ca4-b825-7e6a4d515640 req-4b3bfa55-0992-4f9e-817a-24dae7a174b7 service nova] Releasing lock "refresh_cache-bd337b87-0c9e-44eb-81bf-572610ac5680" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.984944] env[63489]: DEBUG oslo_concurrency.lockutils [None req-0c93cb9c-e982-4a94-adfd-6f514c076cdd tempest-ListImageFiltersTestJSON-1130463695 tempest-ListImageFiltersTestJSON-1130463695-project-member] Acquiring lock "2e18b20e-6317-403b-9ed8-23d987d119bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.985687] env[63489]: DEBUG oslo_concurrency.lockutils [None req-0c93cb9c-e982-4a94-adfd-6f514c076cdd tempest-ListImageFiltersTestJSON-1130463695 tempest-ListImageFiltersTestJSON-1130463695-project-member] Lock "2e18b20e-6317-403b-9ed8-23d987d119bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.039637] env[63489]: DEBUG nova.network.neutron [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.120685] env[63489]: DEBUG nova.scheduler.client.report [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.260222] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c3d386e8-326c-42ef-88f7-46e5c9a84db6 tempest-ServerDiagnosticsNegativeTest-130342942 tempest-ServerDiagnosticsNegativeTest-130342942-project-member] Lock "9bb676ad-a339-4a05-9a66-3bb817543156" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.467s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.543066] env[63489]: INFO nova.compute.manager [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] [instance: 44c8d268-4c23-4b85-915d-3c708586046e] Took 1.04 seconds to deallocate network for instance. [ 547.626861] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.428s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.628144] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Start building networks asynchronously for instance. 
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 547.632106] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.573s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.763553] env[63489]: DEBUG nova.compute.manager [None req-71fac178-826e-4401-897d-73c7435935d1 tempest-VolumesAssistedSnapshotsTest-130347173 tempest-VolumesAssistedSnapshotsTest-130347173-project-member] [instance: 27c8104f-a58f-4416-8a3b-d9be8ca6533b] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 547.904207] env[63489]: DEBUG oslo_concurrency.lockutils [None req-946f6980-3fd4-4ab8-95ec-a8bfb69f5bb3 tempest-ServersTestFqdnHostnames-1363496083 tempest-ServersTestFqdnHostnames-1363496083-project-member] Acquiring lock "62051474-3b8a-49ed-ac5d-08b9b8a730e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.904389] env[63489]: DEBUG oslo_concurrency.lockutils [None req-946f6980-3fd4-4ab8-95ec-a8bfb69f5bb3 tempest-ServersTestFqdnHostnames-1363496083 tempest-ServersTestFqdnHostnames-1363496083-project-member] Lock "62051474-3b8a-49ed-ac5d-08b9b8a730e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.141869] env[63489]: DEBUG nova.compute.utils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 548.143599] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Allocating IP information in the background. 
{{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 548.143752] env[63489]: DEBUG nova.network.neutron [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 548.214641] env[63489]: INFO nova.compute.manager [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Rebuilding instance [ 548.262509] env[63489]: DEBUG nova.compute.manager [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Checking state {{(pid=63489) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 548.262509] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f278b26-6a28-4870-9c23-ddecdb37ee2d {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.290672] env[63489]: DEBUG oslo_concurrency.lockutils [None req-71fac178-826e-4401-897d-73c7435935d1 tempest-VolumesAssistedSnapshotsTest-130347173 tempest-VolumesAssistedSnapshotsTest-130347173-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.303852] env[63489]: DEBUG nova.policy [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '353f96552dce492ba685ead4cf7a435f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fe9be8213ec4e01ae58e1a5acfac0a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 548.439977] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a86357-2dfa-4904-aa3c-4bca8dc2dde8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.450199] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7247a75-cfab-4b73-ba34-6b079149c8b3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.487247] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439a9b9b-3ba8-4c1b-8b25-a9b2708f69f2 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.496539] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99525f89-7da0-420a-8e68-901d1921e4f2 {{(pid=63489) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.518612] env[63489]: DEBUG nova.compute.provider_tree [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.596824] env[63489]: INFO nova.scheduler.client.report [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Deleted allocations for instance 44c8d268-4c23-4b85-915d-3c708586046e [ 548.650120] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Start building block device mappings for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 548.783019] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 548.783019] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ad45b00-2318-4114-9806-0eda1ca458a3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.792486] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 548.792486] env[63489]: value = "task-1050214" [ 548.792486] env[63489]: _type = "Task" [ 548.792486] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.804141] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050214, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.990222] env[63489]: DEBUG nova.compute.manager [req-4d22fc25-5d25-4660-9e89-d3ad40fdc79b req-5251a108-1968-4fdf-9201-ab374002c226 service nova] [instance: bd337b87-0c9e-44eb-81bf-572610ac5680] Received event network-vif-deleted-9f04c3d1-906b-4924-abce-e6761a8c090d {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 549.022536] env[63489]: DEBUG nova.scheduler.client.report [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.108021] env[63489]: DEBUG oslo_concurrency.lockutils [None req-ef795750-c6bf-40b9-a5ee-a5fafae13460 tempest-ServerActionsTestOtherA-2038364936 tempest-ServerActionsTestOtherA-2038364936-project-member] Lock "44c8d268-4c23-4b85-915d-3c708586046e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.887s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.121752] env[63489]: DEBUG oslo_concurrency.lockutils [None req-fb2d0a77-f1d7-41f3-9da5-16b82ee8049f tempest-ListImageFiltersTestJSON-1130463695 tempest-ListImageFiltersTestJSON-1130463695-project-member] Acquiring lock "bebdcb44-58c3-46a7-a6f9-e82ba8ad85e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.122053] env[63489]: DEBUG oslo_concurrency.lockutils [None req-fb2d0a77-f1d7-41f3-9da5-16b82ee8049f tempest-ListImageFiltersTestJSON-1130463695 tempest-ListImageFiltersTestJSON-1130463695-project-member] Lock "bebdcb44-58c3-46a7-a6f9-e82ba8ad85e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.132870] env[63489]: DEBUG nova.network.neutron [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Successfully created port: e3b46887-eeab-4954-aeb7-9eda78df11db {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.304194] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050214, 'name': PowerOffVM_Task, 'duration_secs': 0.13643} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.304509] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 549.304977] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 549.306048] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c549490b-febe-4aba-bca0-39e2a280e22f {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.319399] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistering the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 549.319604] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36cb46a1-4643-4303-99dd-cc3a4eab8ebc {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.352855] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistered the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 549.352855] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleting contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 549.353058] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Deleting the datastore file [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506 {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 549.354038] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ed3e036-fe4e-4991-8eb3-ec6068d70168 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.361421] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 549.361421] env[63489]: value = "task-1050216" [ 549.361421] env[63489]: _type = "Task" [ 549.361421] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.370959] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.531950] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.532621] env[63489]: ERROR nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Traceback (most recent call last): [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.driver.spawn(context, instance, image_meta, [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] vm_ref = self.build_virtual_machine(instance, [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.532621] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] for vif in network_info: [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self._sync_wrapper(fn, *args, **kwargs) [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] 
File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.wait() [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self[:] = self._gt.wait() [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self._exit_event.wait() [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] result = hub.switch() [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.535566] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return self.greenlet.switch() [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] result = function(*args, **kwargs) [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] return func(*args, **kwargs) [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise e [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] nwinfo = self.network_api.allocate_for_instance( [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] created_port_ids = self._update_ports_for_instance( [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] with excutils.save_and_reraise_exception(): [ 549.536124] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] self.force_reraise() [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise self.value [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] updated_port = self._update_port( [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] _ensure_no_port_binding_failure(port) [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] raise exception.PortBindingFailed(port_id=port['id']) [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] nova.exception.PortBindingFailed: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. [ 549.536534] env[63489]: ERROR nova.compute.manager [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] [ 549.536991] env[63489]: DEBUG nova.compute.utils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 549.536991] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Build of instance 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f was re-scheduled: Binding failed for port 7476d068-710a-4b58-be40-117338c13436, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 549.536991] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 549.536991] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.537203] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquired lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.537203] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.537203] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.205s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.613864] env[63489]: DEBUG nova.compute.manager [None req-111ccdb5-1ea1-4c84-ae3c-3e22109cca28 tempest-AttachInterfacesTestJSON-1102712621 tempest-AttachInterfacesTestJSON-1102712621-project-member] [instance: 861a32b2-1a63-4fc5-9151-73993788e0f4] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 549.665125] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Start spawning the instance on the hypervisor. 
{{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 549.694457] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.694457] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.694457] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.694603] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.694603] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.694603] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.694603] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.694603] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.694901] env[63489]: DEBUG 
nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.695244] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.695530] env[63489]: DEBUG nova.virt.hardware [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.696734] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67f61f7-b8cb-4e72-861c-5354910cd215 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.706992] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39e47d4-b940-42f3-b034-3351c041fa64 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.881192] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090767} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.881593] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Deleted the datastore file {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 549.882745] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleted contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 549.882745] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 550.097199] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.135098] env[63489]: DEBUG oslo_concurrency.lockutils [None req-111ccdb5-1ea1-4c84-ae3c-3e22109cca28 tempest-AttachInterfacesTestJSON-1102712621 tempest-AttachInterfacesTestJSON-1102712621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.287800] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.311288] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668862e1-bbd8-4e0d-aa6c-c3f26549b20c {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.321058] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d52bd23-8237-4773-a928-1bf33a978732 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.354386] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5816b2-b7fe-4651-b738-b2686f49a07e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.363228] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce34339-2e27-4bc6-a62a-d6c5a8ac0c24 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.378856] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.794037] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Releasing lock "refresh_cache-91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.794037] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 550.794037] env[63489]: DEBUG nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.794037] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.908192] env[63489]: ERROR nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [req-531d5b4b-9a52-44b9-a8c6-70a900a7aaac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-531d5b4b-9a52-44b9-a8c6-70a900a7aaac"}]}: nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. 
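The 409 above is the Placement API's optimistic-concurrency check: every inventory PUT must echo the resource provider's current generation, and "placement.concurrent_update" means another writer bumped that generation first, so the scheduler report client refreshes its cached view and retries (the "Refreshing inventories/aggregates/traits" lines that follow, ending with the generation 29 to 30 bump). A minimal sketch of that refresh-and-retry loop against the Placement HTTP API; the endpoint, token and retry count are assumptions for illustration, and this is not Nova's report client:

    # Illustrative only: retry an inventory PUT when the provider generation
    # is stale (HTTP 409, code "placement.concurrent_update").
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # hypothetical token
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(provider_uuid, inventories, retries=3):
        for _ in range(retries):
            # Re-read the provider to learn its current generation; the PUT
            # must echo it back or Placement rejects the update.
            rp = requests.get(
                f"{PLACEMENT_URL}/resource_providers/{provider_uuid}",
                headers=HEADERS).json()
            body = {"resource_provider_generation": rp["generation"],
                    "inventories": inventories}
            resp = requests.put(
                f"{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories",
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409: another writer updated the provider in between; loop,
            # refresh the generation and try again, as the report client
            # does in the surrounding log records.
        raise RuntimeError("inventory update kept conflicting; giving up")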
[ 550.919240] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:13:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 550.921688] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 550.921998] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 550.921998] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Got 1 possible 
topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 550.921998] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 550.921998] env[63489]: DEBUG nova.virt.hardware [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 550.924911] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c00ea8-8ee9-4f69-ad03-2489f6c2d7ac {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.929443] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 550.936756] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1508e9cd-5f06-45dc-929a-e18e024c0000 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.955821] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance VIF info [] {{(pid=63489) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 550.961715] env[63489]: DEBUG oslo.service.loopingcall [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.962683] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 550.962996] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.964868] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Creating VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 550.965391] env[63489]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7831912-b133-4ef3-b045-fff23a84a929 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.978376] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: 329d9348-6e96-4ec6-bc29-10a609bd7826 {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 550.980874] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.989075] env[63489]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 550.989075] env[63489]: value = "task-1050217" [ 550.989075] env[63489]: _type = "Task" [ 550.989075] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.002085] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050217, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.003520] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 551.140333] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d9600bca-c681-491c-8ab7-e00d6fd2a783 tempest-ServerActionsTestOtherB-1939697221 tempest-ServerActionsTestOtherB-1939697221-project-member] Acquiring lock "2d6f9948-169b-4cb3-a390-6d86c7cb0de5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.140448] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d9600bca-c681-491c-8ab7-e00d6fd2a783 tempest-ServerActionsTestOtherB-1939697221 tempest-ServerActionsTestOtherB-1939697221-project-member] Lock "2d6f9948-169b-4cb3-a390-6d86c7cb0de5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.452255] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efaf653-6e95-4b9a-a290-6ffa04f2fdea {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.462607] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93316979-3d92-4ac0-8f2b-1f8455efdd08 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.506399] env[63489]: DEBUG nova.network.neutron [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.511125] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17f200d-5c4a-4545-b1d4-ae68b7b497a9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.523314] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2987dc3-d306-42f8-9798-c91665efb95a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.527893] env[63489]: DEBUG oslo_vmware.api [-] Task: {'id': task-1050217, 'name': CreateVM_Task, 'duration_secs': 0.375622} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.528674] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Created VM on the ESX host {{(pid=63489) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 551.528888] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.529072] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.529418] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 551.529876] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abefda70-0605-477d-9364-182eac6c9f2f {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.540168] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 551.545850] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 551.545850] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52644ee2-d453-da4f-c86b-bac9333c7716" [ 551.545850] env[63489]: _type = "Task" [ 551.545850] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.555905] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52644ee2-d453-da4f-c86b-bac9333c7716, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.014988] env[63489]: INFO nova.compute.manager [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] [instance: 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f] Took 1.22 seconds to deallocate network for instance. [ 552.063473] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52644ee2-d453-da4f-c86b-bac9333c7716, 'name': SearchDatastore_Task, 'duration_secs': 0.01042} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.063730] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.065183] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Processing image 314454d0-cea8-4ac2-8a2e-d19d8731016c {{(pid=63489) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 552.065183] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.065183] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.065183] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 552.065183] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8212a65-f363-49f0-9b2f-ec60368360c3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.069272] env[63489]: ERROR nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [req-88929ef2-2851-4db6-a00d-604bc1acfe24] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-88929ef2-2851-4db6-a00d-604bc1acfe24"}]}: nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. [ 552.081322] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=63489) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 552.081542] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=63489) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 552.082572] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29167dc0-9634-4a8f-a890-179206044964 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.089585] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 552.089585] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]5206ff44-290a-54e6-ad00-faa20e4cc75a" [ 552.089585] env[63489]: _type = "Task" [ 552.089585] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.100767] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]5206ff44-290a-54e6-ad00-faa20e4cc75a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.101802] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 552.119500] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 552.119500] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.141305] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 552.175144] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 552.312268] env[63489]: ERROR nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e3b46887-eeab-4954-aeb7-9eda78df11db, please check neutron logs for more information. 
[ 552.312268] env[63489]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.312268] env[63489]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.312268] env[63489]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.312268] env[63489]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.312268] env[63489]: ERROR nova.compute.manager self.force_reraise() [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.312268] env[63489]: ERROR nova.compute.manager raise self.value [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.312268] env[63489]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.312268] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.312268] env[63489]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.312787] env[63489]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.312787] env[63489]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.312787] env[63489]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e3b46887-eeab-4954-aeb7-9eda78df11db, please check neutron logs for more information. 
[ 552.312787] env[63489]: ERROR nova.compute.manager [ 552.312787] env[63489]: Traceback (most recent call last): [ 552.312787] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.312787] env[63489]: listener.cb(fileno) [ 552.312787] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.312787] env[63489]: result = function(*args, **kwargs) [ 552.312787] env[63489]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.312787] env[63489]: return func(*args, **kwargs) [ 552.312787] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.312787] env[63489]: raise e [ 552.312787] env[63489]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.312787] env[63489]: nwinfo = self.network_api.allocate_for_instance( [ 552.312787] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.312787] env[63489]: created_port_ids = self._update_ports_for_instance( [ 552.312787] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.312787] env[63489]: with excutils.save_and_reraise_exception(): [ 552.312787] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.312787] env[63489]: self.force_reraise() [ 552.312787] env[63489]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.312787] env[63489]: raise self.value [ 552.312787] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.312787] env[63489]: updated_port = self._update_port( [ 552.312787] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.312787] env[63489]: _ensure_no_port_binding_failure(port) [ 552.312787] env[63489]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.312787] env[63489]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.313725] env[63489]: nova.exception.PortBindingFailed: Binding failed for port e3b46887-eeab-4954-aeb7-9eda78df11db, please check neutron logs for more information. [ 552.313725] env[63489]: Removing descriptor: 15 [ 552.313725] env[63489]: ERROR nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e3b46887-eeab-4954-aeb7-9eda78df11db, please check neutron logs for more information. 
[ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Traceback (most recent call last): [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] yield resources [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] self.driver.spawn(context, instance, image_meta, [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.313725] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] vm_ref = self.build_virtual_machine(instance, [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] for vif in network_info: [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] return self._sync_wrapper(fn, *args, **kwargs) [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] self.wait() [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] self[:] = self._gt.wait() [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] return self._exit_event.wait() [ 552.314120] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.314485] env[63489]: ERROR 
nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] result = hub.switch() [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] return self.greenlet.switch() [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] result = function(*args, **kwargs) [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] return func(*args, **kwargs) [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] raise e [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] nwinfo = self.network_api.allocate_for_instance( [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.314485] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] created_port_ids = self._update_ports_for_instance( [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] with excutils.save_and_reraise_exception(): [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] self.force_reraise() [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] raise self.value [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] updated_port = self._update_port( [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.314859] 
env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] _ensure_no_port_binding_failure(port) [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.314859] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] raise exception.PortBindingFailed(port_id=port['id']) [ 552.315251] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] nova.exception.PortBindingFailed: Binding failed for port e3b46887-eeab-4954-aeb7-9eda78df11db, please check neutron logs for more information. [ 552.315251] env[63489]: ERROR nova.compute.manager [instance: 112519dc-e533-4428-a235-5bef1fd4acae] [ 552.315251] env[63489]: INFO nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Terminating instance [ 552.322389] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Acquiring lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.322553] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Acquired lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.322715] env[63489]: DEBUG nova.network.neutron [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.539704] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c1a9bb-1d56-41c4-825f-7c93fe90bac5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.549512] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c725b8-60b3-493a-bb68-a808ac0991ab {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.583166] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5b19eb-867b-408f-a11b-09b53be3dc24 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.595531] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbbe021-8f59-43db-8378-26894ac99211 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.605844] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': 
session[52b09b51-c6c1-0127-75d0-635804e5982d]5206ff44-290a-54e6-ad00-faa20e4cc75a, 'name': SearchDatastore_Task, 'duration_secs': 0.009833} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.618701] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.621751] env[63489]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e4d8151-bd44-47a1-9081-120d27a24619 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.632141] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 552.632141] env[63489]: value = "session[52b09b51-c6c1-0127-75d0-635804e5982d]52136816-a614-7974-2b53-91f0fb1fcfc7" [ 552.632141] env[63489]: _type = "Task" [ 552.632141] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.643454] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': session[52b09b51-c6c1-0127-75d0-635804e5982d]52136816-a614-7974-2b53-91f0fb1fcfc7, 'name': SearchDatastore_Task, 'duration_secs': 0.012361} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.643992] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.644579] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 552.644579] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28c87779-da15-4055-8c54-34d12de99870 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.653195] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 552.653195] env[63489]: value = "task-1050221" [ 552.653195] env[63489]: _type = "Task" [ 552.653195] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.666606] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.853112] env[63489]: DEBUG nova.network.neutron [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.938136] env[63489]: DEBUG nova.compute.manager [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Received event network-changed-e3b46887-eeab-4954-aeb7-9eda78df11db {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 552.938719] env[63489]: DEBUG nova.compute.manager [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Refreshing instance network info cache due to event network-changed-e3b46887-eeab-4954-aeb7-9eda78df11db. 
{{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 552.938972] env[63489]: DEBUG oslo_concurrency.lockutils [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] Acquiring lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.975469] env[63489]: DEBUG nova.network.neutron [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.074980] env[63489]: INFO nova.scheduler.client.report [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Deleted allocations for instance 91c27ddb-4cf4-4c4b-ace3-6f542745ba2f [ 553.169532] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050221, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.182190] env[63489]: DEBUG nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updated inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with generation 29 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 553.182412] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde generation from 29 to 30 during operation: update_inventory {{(pid=63489) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 553.182588] env[63489]: DEBUG nova.compute.provider_tree [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.477123] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa 
tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Releasing lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.477550] env[63489]: DEBUG nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Start destroying the instance on the hypervisor. {{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.477761] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 553.478153] env[63489]: DEBUG oslo_concurrency.lockutils [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] Acquired lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.478325] env[63489]: DEBUG nova.network.neutron [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Refreshing network info cache for port e3b46887-eeab-4954-aeb7-9eda78df11db {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 553.480454] env[63489]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ae71e95-130b-453e-b7a2-36a77a94d8f1 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.493161] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6faf0ea-755c-482d-ba87-c2311d83b84c {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.521914] env[63489]: WARNING nova.virt.vmwareapi.vmops [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 112519dc-e533-4428-a235-5bef1fd4acae could not be found. [ 553.521914] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 553.521914] env[63489]: INFO nova.compute.manager [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Took 0.04 seconds to destroy the instance on the hypervisor. 
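Every PortBindingFailed in this run unwinds through the same frames: allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises and aborts the build, after which the instance is terminated and its network deallocated as above. A self-contained sketch of that final check; the binding_failed vif-type marker is the usual Neutron convention and is stated here as an assumption, not a quote of Nova's source:

    # Sketch reconstructed from the traceback frame names; not Nova's source.
    VIF_TYPE_BINDING_FAILED = "binding_failed"   # value Neutron reports for unbindable ports (assumed)

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # If no Neutron mechanism driver could bind the port, its
        # binding:vif_type comes back as "binding_failed"; raising here stops
        # the build instead of spawning a VM with a dead VIF.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    # The port that failed in this section would look roughly like:
    try:
        _ensure_no_port_binding_failure(
            {"id": "e3b46887-eeab-4954-aeb7-9eda78df11db",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)   # matches the "Binding failed for port ..." message in the log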
[ 553.522272] env[63489]: DEBUG oslo.service.loopingcall [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.523080] env[63489]: DEBUG nova.compute.manager [-] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.523080] env[63489]: DEBUG nova.network.neutron [-] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 553.561017] env[63489]: DEBUG nova.network.neutron [-] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.598812] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a35e0dda-bf10-48f6-b3b2-9c0cddf75343 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "91c27ddb-4cf4-4c4b-ace3-6f542745ba2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.030s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.669764] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727142} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.670058] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/314454d0-cea8-4ac2-8a2e-d19d8731016c/314454d0-cea8-4ac2-8a2e-d19d8731016c.vmdk to [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk {{(pid=63489) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 553.670287] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extending root virtual disk to 1048576 {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 553.670556] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7e2cb0b-986a-4cdf-950e-93b1e7377abe {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.681235] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 553.681235] env[63489]: value = "task-1050222" [ 553.681235] env[63489]: _type = "Task" [ 553.681235] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.691174] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 4.154s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.691621] env[63489]: ERROR nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Traceback (most recent call last): [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.driver.spawn(context, instance, image_meta, [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] vm_ref = self.build_virtual_machine(instance, [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.691621] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] for vif in network_info: [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self._sync_wrapper(fn, *args, **kwargs) [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.wait() [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.692037] env[63489]: 
ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self[:] = self._gt.wait() [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self._exit_event.wait() [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] result = hub.switch() [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.692037] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return self.greenlet.switch() [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] result = function(*args, **kwargs) [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] return func(*args, **kwargs) [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise e [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] nwinfo = self.network_api.allocate_for_instance( [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] created_port_ids = self._update_ports_for_instance( [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] with excutils.save_and_reraise_exception(): [ 553.693386] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] self.force_reraise() [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise self.value [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] updated_port = self._update_port( [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] _ensure_no_port_binding_failure(port) [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] raise exception.PortBindingFailed(port_id=port['id']) [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] nova.exception.PortBindingFailed: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. [ 553.693736] env[63489]: ERROR nova.compute.manager [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] [ 553.694052] env[63489]: DEBUG nova.compute.utils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 553.694330] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Build of instance 5373362e-671e-44f4-8c0b-8396a3925c12 was re-scheduled: Binding failed for port addaad04-c9d7-4994-842b-b3501824f997, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 553.695552] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 553.695552] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.695552] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquired lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.695552] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.699968] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.700240] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.074s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.038481] env[63489]: DEBUG nova.network.neutron [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.065344] env[63489]: DEBUG nova.network.neutron [-] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.101480] env[63489]: DEBUG nova.compute.manager [None req-df86fdda-cf84-4a3e-9181-3d9bb58a7623 tempest-ServerDiskConfigTestJSON-438976484 tempest-ServerDiskConfigTestJSON-438976484-project-member] [instance: 0a6b42d7-a77c-4047-bf7d-17a31872d955] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 554.193353] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148279} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.194311] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Extended root virtual disk {{(pid=63489) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 554.194785] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e901f3-94cb-4951-91ea-ddd424aa8b90 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.231288] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 554.233445] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c267a7d0-b33d-4d17-b3d8-edf11a5301d3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.262201] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 554.262201] env[63489]: value = "task-1050223" [ 554.262201] env[63489]: _type = "Task" [ 554.262201] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.284463] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050223, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.313695] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.431625] env[63489]: DEBUG nova.network.neutron [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.440536] env[63489]: DEBUG oslo_concurrency.lockutils [None req-bb9201c5-8fae-4c6e-bbae-7d3d21643134 tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Acquiring lock "9fbf6218-8ad3-43a5-a43e-f8f76da8e68a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.440763] env[63489]: DEBUG oslo_concurrency.lockutils [None req-bb9201c5-8fae-4c6e-bbae-7d3d21643134 tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Lock "9fbf6218-8ad3-43a5-a43e-f8f76da8e68a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.568823] env[63489]: INFO nova.compute.manager [-] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Took 1.05 seconds to deallocate network for instance. [ 554.572096] env[63489]: DEBUG nova.compute.claims [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Aborting claim: {{(pid=63489) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.574801] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b3db6e1b-2942-452b-bdf0-7be56a3056aa tempest-ImagesOneServerTestJSON-1066705924 tempest-ImagesOneServerTestJSON-1066705924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.629703] env[63489]: DEBUG oslo_concurrency.lockutils [None req-df86fdda-cf84-4a3e-9181-3d9bb58a7623 tempest-ServerDiskConfigTestJSON-438976484 tempest-ServerDiskConfigTestJSON-438976484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.757868] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 013f2d36-9578-45d2-aff0-170b5fd97506 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.787449] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050223, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.867279] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.944889] env[63489]: DEBUG oslo_concurrency.lockutils [req-354aaa6c-7c11-4217-aa13-afed95e3fd5c req-cf3302af-5bdf-403a-ab4b-91f8e97e9af8 service nova] Releasing lock "refresh_cache-112519dc-e533-4428-a235-5bef1fd4acae" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.269249] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 5373362e-671e-44f4-8c0b-8396a3925c12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.273031] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 5e32d6c0-d943-416f-9a54-e3511c933ca9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.273031] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 276c32a2-da0f-420f-a9f2-b13c1fd62586 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.273031] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 81e42a2c-fb30-42e2-a2a9-45f3184739e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.273031] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance bd337b87-0c9e-44eb-81bf-572610ac5680 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.273305] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 112519dc-e533-4428-a235-5bef1fd4acae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.290154] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050223, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.371996] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Releasing lock "refresh_cache-5373362e-671e-44f4-8c0b-8396a3925c12" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.371996] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 555.371996] env[63489]: DEBUG nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 555.371996] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.374386] env[63489]: DEBUG oslo_concurrency.lockutils [None req-05074c54-b3d1-47e2-8933-0f6bc1fc487c tempest-FloatingIPsAssociationNegativeTestJSON-1453501459 tempest-FloatingIPsAssociationNegativeTestJSON-1453501459-project-member] Acquiring lock "b088fa1f-45d0-40d5-82eb-3b015d804d13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.374737] env[63489]: DEBUG oslo_concurrency.lockutils [None req-05074c54-b3d1-47e2-8933-0f6bc1fc487c tempest-FloatingIPsAssociationNegativeTestJSON-1453501459 tempest-FloatingIPsAssociationNegativeTestJSON-1453501459-project-member] Lock "b088fa1f-45d0-40d5-82eb-3b015d804d13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.424951] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.780754] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 1c440617-c0ec-485b-a2cc-cd0c8a9d60df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.787768] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050223, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.928360] env[63489]: DEBUG nova.network.neutron [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.289617] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 27c8104f-a58f-4416-8a3b-d9be8ca6533b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.290898] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050223, 'name': ReconfigVM_Task, 'duration_secs': 1.700735} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.291735] env[63489]: DEBUG nova.virt.vmwareapi.volumeops [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506/013f2d36-9578-45d2-aff0-170b5fd97506.vmdk or device None with type sparse {{(pid=63489) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 556.292849] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d179003d-cb48-4056-907a-49f52106cfad {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.306716] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 556.306716] env[63489]: value = "task-1050225" [ 556.306716] env[63489]: _type = "Task" [ 556.306716] env[63489]: } to complete. 
{{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.318862] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050225, 'name': Rename_Task} progress is 5%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.432773] env[63489]: INFO nova.compute.manager [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] [instance: 5373362e-671e-44f4-8c0b-8396a3925c12] Took 1.06 seconds to deallocate network for instance. [ 556.445721] env[63489]: DEBUG oslo_concurrency.lockutils [None req-460ab623-0566-44f0-b8a1-f5b738edfc81 tempest-ServerExternalEventsTest-526711440 tempest-ServerExternalEventsTest-526711440-project-member] Acquiring lock "121ca665-20cc-4cda-a3af-1cc3f94a3324" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.446118] env[63489]: DEBUG oslo_concurrency.lockutils [None req-460ab623-0566-44f0-b8a1-f5b738edfc81 tempest-ServerExternalEventsTest-526711440 tempest-ServerExternalEventsTest-526711440-project-member] Lock "121ca665-20cc-4cda-a3af-1cc3f94a3324" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.480897] env[63489]: DEBUG nova.compute.manager [req-f347d83c-0153-4522-9184-962e26507f74 req-f52c4952-1ee9-4584-846e-cfd72cfaf827 service nova] [instance: 112519dc-e533-4428-a235-5bef1fd4acae] Received event network-vif-deleted-e3b46887-eeab-4954-aeb7-9eda78df11db {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 556.793626] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 861a32b2-1a63-4fc5-9151-73993788e0f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.820664] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050225, 'name': Rename_Task, 'duration_secs': 0.260881} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.820664] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 556.820830] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec859d44-68b2-4e71-bcb4-9b42f42cf405 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.828613] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Waiting for the task: (returnval){ [ 556.828613] env[63489]: value = "task-1050226" [ 556.828613] env[63489]: _type = "Task" [ 556.828613] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.838749] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050226, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.900069] env[63489]: DEBUG oslo_concurrency.lockutils [None req-0df7b8f1-b47b-441f-9481-46ba79069580 tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Acquiring lock "1660d898-e40d-4189-9f89-f15ac5d65024" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.900342] env[63489]: DEBUG oslo_concurrency.lockutils [None req-0df7b8f1-b47b-441f-9481-46ba79069580 tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Lock "1660d898-e40d-4189-9f89-f15ac5d65024" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.299862] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 0a6b42d7-a77c-4047-bf7d-17a31872d955 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.345378] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050226, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.494502] env[63489]: INFO nova.scheduler.client.report [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Deleted allocations for instance 5373362e-671e-44f4-8c0b-8396a3925c12 [ 557.804088] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 08d4467f-be74-4306-bbc6-2ee62c5fa136 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.845920] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050226, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.016341] env[63489]: DEBUG oslo_concurrency.lockutils [None req-635d8f4f-2126-40b6-8845-1bb811aefbfd tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "5373362e-671e-44f4-8c0b-8396a3925c12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.604s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.055466] env[63489]: DEBUG oslo_concurrency.lockutils [None req-cb5ca186-a096-434f-a483-8197586c342f tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Acquiring lock "9c1cfd6f-7f7a-4548-9f90-9dae20136d3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.055606] env[63489]: DEBUG oslo_concurrency.lockutils [None req-cb5ca186-a096-434f-a483-8197586c342f tempest-ListServerFiltersTestJSON-1472602063 tempest-ListServerFiltersTestJSON-1472602063-project-member] Lock "9c1cfd6f-7f7a-4548-9f90-9dae20136d3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.060397] env[63489]: DEBUG oslo_concurrency.lockutils [None req-27af84af-eff3-4f15-9a14-c5e9ce88512f tempest-VolumesAdminNegativeTest-47410898 tempest-VolumesAdminNegativeTest-47410898-project-member] Acquiring lock "f995a598-09db-4bad-8df5-7123adc45142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.060397] env[63489]: DEBUG oslo_concurrency.lockutils [None req-27af84af-eff3-4f15-9a14-c5e9ce88512f tempest-VolumesAdminNegativeTest-47410898 tempest-VolumesAdminNegativeTest-47410898-project-member] Lock "f995a598-09db-4bad-8df5-7123adc45142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.310902] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 1e26edbe-4f6a-4e14-af55-48888910eb9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.343441] env[63489]: DEBUG oslo_vmware.api [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Task: {'id': task-1050226, 'name': PowerOnVM_Task, 'duration_secs': 1.098796} completed successfully. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.343845] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered on the VM {{(pid=63489) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 558.344072] env[63489]: DEBUG nova.compute.manager [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Checking state {{(pid=63489) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 558.345033] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d389238d-89b3-42da-b617-ba1dc8e7dd2c {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.525393] env[63489]: DEBUG nova.compute.manager [None req-8b49f2a4-16a9-48d6-9b2f-854ddd3cfc2e tempest-ServersTestJSON-808274608 tempest-ServersTestJSON-808274608-project-member] [instance: 08d4467f-be74-4306-bbc6-2ee62c5fa136] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.814616] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 1c9f6ec2-07a8-420b-9fa9-08bc4f94400f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.867971] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1648c2f3-994b-45fa-8a57-1da460261f90 tempest-ServersAdmin275Test-348115069 tempest-ServersAdmin275Test-348115069-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.065437] env[63489]: DEBUG oslo_concurrency.lockutils [None req-8b49f2a4-16a9-48d6-9b2f-854ddd3cfc2e tempest-ServersTestJSON-808274608 tempest-ServersTestJSON-808274608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.319973] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 2e18b20e-6317-403b-9ed8-23d987d119bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.748267] env[63489]: DEBUG oslo_concurrency.lockutils [None req-8bcd8ee0-a64b-419b-ae38-a92acadb93e3 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Acquiring lock "8d0a4c58-a0cc-485d-a642-58f111011253" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.749175] env[63489]: DEBUG oslo_concurrency.lockutils [None req-8bcd8ee0-a64b-419b-ae38-a92acadb93e3 tempest-DeleteServersAdminTestJSON-2010203047 tempest-DeleteServersAdminTestJSON-2010203047-project-member] Lock "8d0a4c58-a0cc-485d-a642-58f111011253" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.824435] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 62051474-3b8a-49ed-ac5d-08b9b8a730e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.918556] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "013f2d36-9578-45d2-aff0-170b5fd97506" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.919049] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "013f2d36-9578-45d2-aff0-170b5fd97506" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.919258] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "013f2d36-9578-45d2-aff0-170b5fd97506-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.919480] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "013f2d36-9578-45d2-aff0-170b5fd97506-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.919646] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "013f2d36-9578-45d2-aff0-170b5fd97506-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.922277] env[63489]: INFO nova.compute.manager [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Terminating instance [ 559.924274] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "refresh_cache-013f2d36-9578-45d2-aff0-170b5fd97506" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.924475] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquired lock "refresh_cache-013f2d36-9578-45d2-aff0-170b5fd97506" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.924682] env[63489]: DEBUG nova.network.neutron [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 
tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.326700] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance bebdcb44-58c3-46a7-a6f9-e82ba8ad85e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.453812] env[63489]: DEBUG nova.network.neutron [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.545029] env[63489]: DEBUG nova.network.neutron [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.832029] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 2d6f9948-169b-4cb3-a390-6d86c7cb0de5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.049198] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Releasing lock "refresh_cache-013f2d36-9578-45d2-aff0-170b5fd97506" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.052080] env[63489]: DEBUG nova.compute.manager [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Start destroying the instance on the hypervisor. 
{{(pid=63489) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.052080] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Destroying instance {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 561.052080] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e80d614-5e1a-494d-8502-e9d5be16580e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.064169] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powering off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 561.064169] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80939fcf-0b56-45b0-be08-e38864b70bb5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.075739] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 561.075739] env[63489]: value = "task-1050229" [ 561.075739] env[63489]: _type = "Task" [ 561.075739] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.085993] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.337989] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Instance 9fbf6218-8ad3-43a5-a43e-f8f76da8e68a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63489) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.338485] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=63489) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 561.341026] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=63489) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 561.357472] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1b6871cc-20a5-4f77-9ad6-718673b602c0 tempest-AttachInterfacesUnderV243Test-824815886 tempest-AttachInterfacesUnderV243Test-824815886-project-member] Acquiring lock "73b8a9a3-288a-4fb8-97d1-f2f8339426f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.358025] env[63489]: DEBUG oslo_concurrency.lockutils [None req-1b6871cc-20a5-4f77-9ad6-718673b602c0 tempest-AttachInterfacesUnderV243Test-824815886 tempest-AttachInterfacesUnderV243Test-824815886-project-member] Lock "73b8a9a3-288a-4fb8-97d1-f2f8339426f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.595522] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050229, 'name': PowerOffVM_Task, 'duration_secs': 0.219581} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.595522] env[63489]: DEBUG nova.virt.vmwareapi.vm_util [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Powered off the VM {{(pid=63489) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 561.595522] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistering the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 561.595522] env[63489]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d143c63-678d-41d2-b305-1581c06929a9 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.633383] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Unregistered the VM {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 561.634024] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleting contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 561.634462] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleting the datastore file [datastore2] 013f2d36-9578-45d2-aff0-170b5fd97506 {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.634797] env[63489]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3fdbbbf-a9f2-4aa1-967f-453ff54fa2c3 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.649841] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for the task: (returnval){ [ 561.649841] env[63489]: value = "task-1050232" [ 561.649841] env[63489]: _type = "Task" [ 561.649841] env[63489]: } to complete. {{(pid=63489) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.661055] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050232, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.812890] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94fa011-f971-4ea5-8bd8-1e62e809fdce {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.821896] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c912928-d287-40cb-ae78-9e6e0f1443c8 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.859733] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6ae018-c87c-4550-a2e0-45dbf0db4bdb {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.869824] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35421521-2fe7-438c-abdc-5c6193e7cff6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.893214] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.163527] env[63489]: DEBUG oslo_vmware.api [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Task: {'id': task-1050232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17057} completed successfully. 
{{(pid=63489) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.163527] env[63489]: DEBUG nova.virt.vmwareapi.ds_util [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Deleted the datastore file {{(pid=63489) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 562.163714] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deleted contents of the VM from datastore datastore2 {{(pid=63489) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 562.164511] env[63489]: DEBUG nova.virt.vmwareapi.vmops [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance destroyed {{(pid=63489) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 562.164669] env[63489]: INFO nova.compute.manager [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Took 1.11 seconds to destroy the instance on the hypervisor. [ 562.164969] env[63489]: DEBUG oslo.service.loopingcall [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63489) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.165335] env[63489]: DEBUG nova.compute.manager [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.165335] env[63489]: DEBUG nova.network.neutron [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.182564] env[63489]: DEBUG nova.network.neutron [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.421812] env[63489]: ERROR nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [req-6bc1cc7d-8741-455c-8635-fc12cb757bd7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6bc1cc7d-8741-455c-8635-fc12cb757bd7"}]} [ 562.438358] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 562.458063] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 562.458254] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.473287] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 562.496324] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 562.689220] env[63489]: DEBUG nova.network.neutron [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.886934] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02c1d8e-4633-40f4-b95e-cd5f5ad7ffe0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.895317] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7afa67-e554-4db5-84d3-802573137d7b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.929160] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-598d0b43-1ddd-462a-bb65-279beecc67b6 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.937741] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdf02b4-e029-401f-90a4-09257b3f6fbb {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.954183] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 563.195241] env[63489]: INFO nova.compute.manager [-] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Took 1.03 seconds to deallocate network for instance. [ 563.481694] env[63489]: ERROR nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] [req-1af31c25-f86f-4bea-8fa5-aa8a152b805a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 6b569bc3-63ff-4af7-bc85-277940cdadde. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1af31c25-f86f-4bea-8fa5-aa8a152b805a"}]} [ 563.504967] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing inventories for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 563.532905] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating ProviderTree inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 563.533092] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 182, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 563.550169] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing aggregate associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, aggregates: None {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 563.575383] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Refreshing trait associations for resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63489) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 563.708272] env[63489]: DEBUG oslo_concurrency.lockutils [None req-770aaca1-bbe4-4e3b-85c9-77a1ac3a7b35 tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.830560] env[63489]: DEBUG oslo_concurrency.lockutils [None req-6a7244ba-e1c0-400f-92f4-ada6522d63ca tempest-ServerRescueTestJSONUnderV235-906789650 tempest-ServerRescueTestJSONUnderV235-906789650-project-member] Acquiring lock "e121720a-3109-4892-9473-ac458baef252" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.830882] 
env[63489]: DEBUG oslo_concurrency.lockutils [None req-6a7244ba-e1c0-400f-92f4-ada6522d63ca tempest-ServerRescueTestJSONUnderV235-906789650 tempest-ServerRescueTestJSONUnderV235-906789650-project-member] Lock "e121720a-3109-4892-9473-ac458baef252" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.995367] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f23dc32-656c-4afc-9416-8ca1897fe99f {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.003997] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f9dca8-e19f-4ee1-9cb1-951d1c1b1e68 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.038344] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02bc2e3-29c9-4b66-a2b7-9a7c8be9fe20 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.046956] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7b9c2a-8842-480f-9bde-4af47e0fd95a {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.063268] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 564.600708] env[63489]: DEBUG nova.scheduler.client.report [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updated inventory for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with generation 39 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 564.601021] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating resource provider 6b569bc3-63ff-4af7-bc85-277940cdadde generation from 39 to 40 during operation: update_inventory {{(pid=63489) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 564.601122] env[63489]: DEBUG nova.compute.provider_tree [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Updating inventory in ProviderTree for provider 6b569bc3-63ff-4af7-bc85-277940cdadde with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 565.109961] env[63489]: DEBUG nova.compute.resource_tracker [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63489) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 565.112022] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b53f3474-3d57-4e04-a78b-b051d5f87bfb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.410s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.112022] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.097s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.376527] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c1b8d73f-cbf9-4af0-980d-40d918483b92 tempest-ServersTestMultiNic-474174307 tempest-ServersTestMultiNic-474174307-project-member] Acquiring lock "426c5c3a-4450-4496-9a82-d2389cb484ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.376896] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c1b8d73f-cbf9-4af0-980d-40d918483b92 tempest-ServersTestMultiNic-474174307 tempest-ServersTestMultiNic-474174307-project-member] Lock "426c5c3a-4450-4496-9a82-d2389cb484ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.079287] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb1bca5-19de-49f6-ab49-139feaeee96f {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.087658] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf9f757-3365-4e09-b5d8-d28092a0a008 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.124574] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681bfd0d-3663-4dbd-aa34-e8cbf39c93d0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.134193] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492826f8-d1d9-4d47-b41d-75321a94f3c5 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.152700] env[63489]: DEBUG nova.compute.provider_tree [None 
req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.661023] env[63489]: DEBUG nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 567.170577] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.060s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.173032] env[63489]: ERROR nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. 
[ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Traceback (most recent call last): [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.driver.spawn(context, instance, image_meta, [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] vm_ref = self.build_virtual_machine(instance, [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.173032] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] for vif in network_info: [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self._sync_wrapper(fn, *args, **kwargs) [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.wait() [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self[:] = self._gt.wait() [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self._exit_event.wait() [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] result = hub.switch() [ 567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
567.173506] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return self.greenlet.switch() [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] result = function(*args, **kwargs) [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] return func(*args, **kwargs) [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise e [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] nwinfo = self.network_api.allocate_for_instance( [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] created_port_ids = self._update_ports_for_instance( [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] with excutils.save_and_reraise_exception(): [ 567.173886] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] self.force_reraise() [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise self.value [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] updated_port = self._update_port( [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] _ensure_no_port_binding_failure(port) [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] raise exception.PortBindingFailed(port_id=port['id']) [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] nova.exception.PortBindingFailed: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. [ 567.174437] env[63489]: ERROR nova.compute.manager [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] [ 567.174807] env[63489]: DEBUG nova.compute.utils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 567.174807] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.416s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.179444] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Build of instance 5e32d6c0-d943-416f-9a54-e3511c933ca9 was re-scheduled: Binding failed for port c4d0be6e-bb71-426b-ab27-bfe833d526cb, please check neutron logs for more information. 
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 567.180350] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 567.180641] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquiring lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.180797] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Acquired lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.180961] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.715242] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.835878] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.116468] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25065d60-1fd7-4ac5-b755-4b137b01179b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.124738] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545563e7-e696-449c-bb9f-21071d907c09 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.156201] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfdaaac-fcc4-4713-a50c-2099071ce567 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.164281] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618b3672-7f53-44bd-ba48-04df12425a2b {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.178797] env[63489]: DEBUG nova.compute.provider_tree [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.341818] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Releasing lock "refresh_cache-5e32d6c0-d943-416f-9a54-e3511c933ca9" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.341818] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 568.342525] env[63489]: DEBUG nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.342525] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 568.363220] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Instance cache missing network info. {{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.401527] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b5563651-b5f1-40be-9b1f-8760dc03b67c tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Acquiring lock "a0ed9b89-7b9b-45fa-b88e-c0f12d004a8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.401747] env[63489]: DEBUG oslo_concurrency.lockutils [None req-b5563651-b5f1-40be-9b1f-8760dc03b67c tempest-SecurityGroupsTestJSON-1477286778 tempest-SecurityGroupsTestJSON-1477286778-project-member] Lock "a0ed9b89-7b9b-45fa-b88e-c0f12d004a8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.682050] env[63489]: DEBUG nova.scheduler.client.report [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.815496] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a2bb170b-ae20-4e1b-bd7a-72e68d49e23b tempest-ServerDiagnosticsV248Test-1708497290 tempest-ServerDiagnosticsV248Test-1708497290-project-member] Acquiring lock "837c71dd-1c0c-44d2-b978-9ef6ea7d18a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.815827] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a2bb170b-ae20-4e1b-bd7a-72e68d49e23b tempest-ServerDiagnosticsV248Test-1708497290 
tempest-ServerDiagnosticsV248Test-1708497290-project-member] Lock "837c71dd-1c0c-44d2-b978-9ef6ea7d18a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.868371] env[63489]: DEBUG nova.network.neutron [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.186868] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.013s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.187553] env[63489]: ERROR nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Traceback (most recent call last): [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.driver.spawn(context, instance, image_meta, [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] vm_ref = self.build_virtual_machine(instance, [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.187553] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] for vif in network_info: [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 569.187924] 
env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self._sync_wrapper(fn, *args, **kwargs) [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.wait() [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self[:] = self._gt.wait() [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self._exit_event.wait() [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] result = hub.switch() [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 569.187924] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return self.greenlet.switch() [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] result = function(*args, **kwargs) [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] return func(*args, **kwargs) [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise e [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] nwinfo = self.network_api.allocate_for_instance( [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] created_port_ids = self._update_ports_for_instance( [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.188319] env[63489]: ERROR 
nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] with excutils.save_and_reraise_exception(): [ 569.188319] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] self.force_reraise() [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise self.value [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] updated_port = self._update_port( [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] _ensure_no_port_binding_failure(port) [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] raise exception.PortBindingFailed(port_id=port['id']) [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] nova.exception.PortBindingFailed: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. [ 569.188712] env[63489]: ERROR nova.compute.manager [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] [ 569.189077] env[63489]: DEBUG nova.compute.utils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. 
{{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 569.189606] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.966s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.191402] env[63489]: INFO nova.compute.claims [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.194421] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Build of instance 276c32a2-da0f-420f-a9f2-b13c1fd62586 was re-scheduled: Binding failed for port 1f64771d-16d2-47ac-a3ff-99a3863e1857, please check neutron logs for more information. {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 569.194904] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 569.195152] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquiring lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.195301] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Acquired lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.195460] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.258766] env[63489]: DEBUG oslo_concurrency.lockutils [None req-79e72a16-4df8-4dda-b5c7-c040f1620f07 tempest-TenantUsagesTestJSON-1781697702 tempest-TenantUsagesTestJSON-1781697702-project-member] Acquiring lock "3730f79b-eb8f-4be4-8a51-488819b9e350" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.259278] env[63489]: DEBUG oslo_concurrency.lockutils [None req-79e72a16-4df8-4dda-b5c7-c040f1620f07 tempest-TenantUsagesTestJSON-1781697702 tempest-TenantUsagesTestJSON-1781697702-project-member] Lock 
"3730f79b-eb8f-4be4-8a51-488819b9e350" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.288121] env[63489]: DEBUG oslo_concurrency.lockutils [None req-50d931f3-947f-42c6-a2db-4140ef450cfc tempest-ServerShowV247Test-827764550 tempest-ServerShowV247Test-827764550-project-member] Acquiring lock "2cfefb86-cc08-45c6-bcca-86fb35208698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.288342] env[63489]: DEBUG oslo_concurrency.lockutils [None req-50d931f3-947f-42c6-a2db-4140ef450cfc tempest-ServerShowV247Test-827764550 tempest-ServerShowV247Test-827764550-project-member] Lock "2cfefb86-cc08-45c6-bcca-86fb35208698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.372037] env[63489]: INFO nova.compute.manager [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] [instance: 5e32d6c0-d943-416f-9a54-e3511c933ca9] Took 1.03 seconds to deallocate network for instance. [ 569.550459] env[63489]: DEBUG oslo_concurrency.lockutils [None req-cc741ae9-bb25-4a35-9002-da7460620c90 tempest-ServerShowV247Test-827764550 tempest-ServerShowV247Test-827764550-project-member] Acquiring lock "edd906e9-e13c-495c-b50d-f505009d80d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.550696] env[63489]: DEBUG oslo_concurrency.lockutils [None req-cc741ae9-bb25-4a35-9002-da7460620c90 tempest-ServerShowV247Test-827764550 tempest-ServerShowV247Test-827764550-project-member] Lock "edd906e9-e13c-495c-b50d-f505009d80d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.715724] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance cache missing network info. 
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.783797] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.285888] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Releasing lock "refresh_cache-276c32a2-da0f-420f-a9f2-b13c1fd62586" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.286180] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 570.286294] env[63489]: DEBUG nova.compute.manager [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Deallocating network for instance {{(pid=63489) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 570.286476] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] deallocate_for_instance() {{(pid=63489) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 570.306508] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Instance cache missing network info. 
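The "Virt driver does not provide unplug_vifs method" entry reflects a capability probe during cleanup of the re-scheduled instance: when the virt driver lacks the optional hook, the manager skips VIF unplugging and moves straight on to deallocating the network. A hypothetical sketch of that kind of probe follows; Nova's real code path may differ in detail (the driver likely signals this by raising NotImplementedError), and the class and function names here are invented for illustration.

class FakeVMwareDriver:
    """Stand-in driver that, like the VMware driver in this log, has no unplug_vifs."""


def cleanup_allocated_networks(driver, instance_uuid, network_info):
    # Probe for the optional hook instead of assuming every driver implements it.
    unplug = getattr(driver, "unplug_vifs", None)
    if callable(unplug):
        unplug(instance_uuid, network_info)
    else:
        print("Virt driver does not provide unplug_vifs; skipping VIF unplug.")
    # Either way, fall through to deallocating the instance's networking.
    print(f"Deallocating network for instance {instance_uuid}")


cleanup_allocated_networks(FakeVMwareDriver(),
                           "276c32a2-da0f-420f-a9f2-b13c1fd62586", [])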
{{(pid=63489) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.409694] env[63489]: INFO nova.scheduler.client.report [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Deleted allocations for instance 5e32d6c0-d943-416f-9a54-e3511c933ca9 [ 570.614554] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7142af18-4579-488e-ba15-3b459c2d3fde {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.623095] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421ce084-194e-43f7-b4af-1eb53f0f7e40 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.655077] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59590f11-3043-4e56-9774-9f85c35ae644 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.663982] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d802afde-1319-4eef-b104-0a4afba792e1 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.677736] env[63489]: DEBUG nova.compute.provider_tree [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.809486] env[63489]: DEBUG nova.network.neutron [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Updating instance_info_cache with network_info: [] {{(pid=63489) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.920943] env[63489]: DEBUG oslo_concurrency.lockutils [None req-c0c91211-9804-488e-a6c4-130f3578c8c8 tempest-InstanceActionsTestJSON-1400940690 tempest-InstanceActionsTestJSON-1400940690-project-member] Lock "5e32d6c0-d943-416f-9a54-e3511c933ca9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.324s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.182034] env[63489]: DEBUG nova.scheduler.client.report [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.312226] env[63489]: INFO nova.compute.manager 
[None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] [instance: 276c32a2-da0f-420f-a9f2-b13c1fd62586] Took 1.03 seconds to deallocate network for instance. [ 571.424156] env[63489]: DEBUG nova.compute.manager [None req-9ca69050-8458-4a46-998e-a47b2b6170c0 tempest-ServerActionsV293TestJSON-838824961 tempest-ServerActionsV293TestJSON-838824961-project-member] [instance: 1e26edbe-4f6a-4e14-af55-48888910eb9b] Starting instance... {{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 571.687437] env[63489]: DEBUG oslo_concurrency.lockutils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.688044] env[63489]: DEBUG nova.compute.manager [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Start building networks asynchronously for instance. {{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 571.690769] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.163s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.945329] env[63489]: DEBUG oslo_concurrency.lockutils [None req-9ca69050-8458-4a46-998e-a47b2b6170c0 tempest-ServerActionsV293TestJSON-838824961 tempest-ServerActionsV293TestJSON-838824961-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.196123] env[63489]: DEBUG nova.compute.utils [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Using /dev/sd instead of None {{(pid=63489) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.205416] env[63489]: DEBUG nova.compute.manager [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Allocating IP information in the background. 
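"Start building networks asynchronously for instance" and "Allocating IP information in the background" mean the Neutron allocation is handed off to a separate greenthread while block device mappings and the rest of the build continue; the traceback later in this section shows the consumer side, where iterating the network info blocks on that greenthread via wait(). The following is a simplified sketch of that deferred-allocation shape using eventlet; the class and helper names are illustrative, not Nova's.

import eventlet


def allocate_networks(instance_uuid):
    """Stand-in for the slow Neutron port allocation done in the background."""
    eventlet.sleep(0.1)  # pretend to talk to Neutron
    return [{"port_id": "9ff2fdbd-5781-4119-80af-96b56677dbcb"}]


class AsyncNetworkInfo:
    """Defer allocation to a greenthread; block only when the result is first needed."""

    def __init__(self, instance_uuid):
        self._gt = eventlet.spawn(allocate_networks, instance_uuid)
        self._result = None

    def wait(self):
        if self._result is None:
            # Re-raises any exception raised inside the greenthread
            # (e.g. a port binding failure).
            self._result = self._gt.wait()
        return self._result

    def __iter__(self):
        return iter(self.wait())


nw_info = AsyncNetworkInfo("1c440617-c0ec-485b-a2cc-cd0c8a9d60df")
# ... other build steps would run here while allocation proceeds ...
for vif in nw_info:  # first iteration blocks until allocation finishes
    print(vif["port_id"])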
{{(pid=63489) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 572.205747] env[63489]: DEBUG nova.network.neutron [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] allocate_for_instance() {{(pid=63489) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 572.285769] env[63489]: DEBUG nova.policy [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '316d520f1697451f992eb81708b82f4c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b04dbffa08364859aeb5c7cd2ce7acd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63489) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.355127] env[63489]: INFO nova.scheduler.client.report [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Deleted allocations for instance 276c32a2-da0f-420f-a9f2-b13c1fd62586 [ 572.699694] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7b3b20-205b-4c70-a027-1d74b5c5ccc0 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.707501] env[63489]: DEBUG nova.compute.manager [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Start building block device mappings for instance. 
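The nova.policy entry records an authorization check of network:attach_external_network against the request credentials (roles member and reader only), which fails, so the port is created without external-network privileges. The sketch below shows the general oslo.policy enforcement flow with those credentials; the 'is_admin:True' default rule is an assumption for illustration and not necessarily the rule Nova registers.

from oslo_config import cfg
from oslo_policy import policy

# Initialize an (empty) config so the enforcer can read its options.
cfg.CONF([], project="policy-sketch")

enforcer = policy.Enforcer(cfg.CONF)
# Hypothetical default: only admin-scoped requests may attach external networks.
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "is_admin:True"))

# Credentials as they appear in the log entry above (member/reader, no admin).
creds = {
    "user_id": "316d520f1697451f992eb81708b82f4c",
    "project_id": "b04dbffa08364859aeb5c7cd2ce7acd2",
    "roles": ["member", "reader"],
    "is_admin": False,
}

# With do_raise=False the check returns a boolean instead of raising.
allowed = enforcer.enforce("network:attach_external_network", {}, creds,
                           do_raise=False)
print("attach_external_network allowed:", allowed)  # -> False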
{{(pid=63489) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 572.714017] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ecc28c-621a-42a4-8ce9-e70179bfe44e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.746657] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479501eb-5776-4bbf-a1e5-7f2e0bdeec45 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.756172] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814eb206-e675-49d2-8792-ed8bbd0637de {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.771770] env[63489]: DEBUG nova.compute.provider_tree [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Inventory has not changed in ProviderTree for provider: 6b569bc3-63ff-4af7-bc85-277940cdadde {{(pid=63489) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.868948] env[63489]: DEBUG oslo_concurrency.lockutils [None req-3b4dee36-dc01-4d4b-bd5e-f92a35a9d635 tempest-MigrationsAdminTest-1565328470 tempest-MigrationsAdminTest-1565328470-project-member] Lock "276c32a2-da0f-420f-a9f2-b13c1fd62586" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.864s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.890090] env[63489]: DEBUG nova.network.neutron [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Successfully created port: 9ff2fdbd-5781-4119-80af-96b56677dbcb {{(pid=63489) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.278719] env[63489]: DEBUG nova.scheduler.client.report [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Inventory has not changed for provider 6b569bc3-63ff-4af7-bc85-277940cdadde based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 181, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63489) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.372210] env[63489]: DEBUG nova.compute.manager [None req-4a7217a7-e792-4de2-accd-f27068b98e41 tempest-AttachInterfacesV270Test-564248798 tempest-AttachInterfacesV270Test-564248798-project-member] [instance: 1c9f6ec2-07a8-420b-9fa9-08bc4f94400f] Starting instance... 
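The "Inventory has not changed for provider ... based on inventory data" entries carry the resource provider's full inventory. For each resource class, the capacity Placement schedules against is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check of the figures reported above:

# Inventory exactly as reported for provider 6b569bc3-63ff-4af7-bc85-277940cdadde.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1, "max_unit": 16,
                  "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1, "max_unit": 181,
                  "step_size": 1, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:.0f}, largest single allocation={inv['max_unit']}")
# VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400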
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 573.632734] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e3f20a83-a0fd-4a92-846c-2aed2817b6f7 tempest-ServersTestBootFromVolume-879122079 tempest-ServersTestBootFromVolume-879122079-project-member] Acquiring lock "4546f905-4422-4de1-a17c-9b044f289dad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.632987] env[63489]: DEBUG oslo_concurrency.lockutils [None req-e3f20a83-a0fd-4a92-846c-2aed2817b6f7 tempest-ServersTestBootFromVolume-879122079 tempest-ServersTestBootFromVolume-879122079-project-member] Lock "4546f905-4422-4de1-a17c-9b044f289dad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.726670] env[63489]: DEBUG nova.compute.manager [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Start spawning the instance on the hypervisor. {{(pid=63489) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 573.757722] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-30T19:14:49Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='604346812',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1709701001',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-30T19:12:43Z,direct_url=,disk_format='vmdk',id=314454d0-cea8-4ac2-8a2e-d19d8731016c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='04ce9f22a8e54c05b09ebce44d46279d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-30T19:12:43Z,virtual_size=,visibility=), allow threads: False {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.758017] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Flavor limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.758206] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Image limits 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.758392] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Flavor pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 573.758539] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Image pref 0:0:0 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.758684] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63489) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.758910] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.759093] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 573.759266] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Got 1 possible topologies {{(pid=63489) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.759429] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.759600] env[63489]: DEBUG nova.virt.hardware [None req-a8ecb480-dfc2-43f5-86e7-a737dcd52a9a tempest-ServersWithSpecificFlavorTestJSON-320940198 tempest-ServersWithSpecificFlavorTestJSON-320940198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63489) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.760516] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8531d92-41c5-4424-8910-672617f8e94e {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.772210] env[63489]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e530e78-ebdc-4d53-8bb0-af1e1fca6237 {{(pid=63489) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.783612] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.093s {{(pid=63489) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.784366] env[63489]: ERROR nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Traceback (most recent call last): [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.driver.spawn(context, instance, image_meta, [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] vm_ref = self.build_virtual_machine(instance, [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 573.784366] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] for vif in network_info: [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self._sync_wrapper(fn, *args, **kwargs) [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.wait() [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self[:] = self._gt.wait() [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self._exit_event.wait() [ 573.784726] 
env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] result = hub.switch() [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 573.784726] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return self.greenlet.switch() [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] result = function(*args, **kwargs) [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] return func(*args, **kwargs) [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise e [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] nwinfo = self.network_api.allocate_for_instance( [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] created_port_ids = self._update_ports_for_instance( [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] with excutils.save_and_reraise_exception(): [ 573.785114] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] self.force_reraise() [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise self.value [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] updated_port 
= self._update_port( [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] _ensure_no_port_binding_failure(port) [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] raise exception.PortBindingFailed(port_id=port['id']) [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] nova.exception.PortBindingFailed: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. [ 573.785482] env[63489]: ERROR nova.compute.manager [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] [ 573.785811] env[63489]: DEBUG nova.compute.utils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. {{(pid=63489) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 573.793929] env[63489]: DEBUG oslo_concurrency.lockutils [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.155s {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.794157] env[63489]: DEBUG nova.objects.instance [None req-42130d84-391e-42d4-9527-2f5e379b82ad tempest-ServersAdmin275Test-2131968261 tempest-ServersAdmin275Test-2131968261-project-member] [instance: 013f2d36-9578-45d2-aff0-170b5fd97506] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63489) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 573.800063] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Build of instance 81e42a2c-fb30-42e2-a2a9-45f3184739e4 was re-scheduled: Binding failed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac, please check neutron logs for more information. 
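The traceback above shows the complete failure chain: _update_port calls _ensure_no_port_binding_failure, which raises PortBindingFailed for port 2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac; save_and_reraise_exception in _update_ports_for_instance lets cleanup run and then re-raises; the greenthread's exception surfaces when the build waits on the network info, and the instance is re-scheduled. Below is a condensed sketch of the two key pieces; the exception class and the 'binding_failed' vif_type value are defined locally here for illustration, whereas Nova takes them from nova.exception and Neutron's port binding constants.

from oslo_utils import excutils  # same helper that appears in the traceback


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding in the port's binding:vif_type field.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


def update_ports_for_instance(ports):
    for port in ports:
        try:
            ensure_no_port_binding_failure(port)
        except Exception:
            # Clean up here, then re-raise the original exception unchanged.
            with excutils.save_and_reraise_exception():
                print(f"port {port['id']} failed, rolling back created ports")


try:
    update_ports_for_instance(
        [{"id": "2e8328dc-8ee5-410b-9d33-bcb1f5ae51ac",
          "binding:vif_type": "binding_failed"}])
except PortBindingFailed as exc:
    print(exc)  # the same message that appears throughout this log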
{{(pid=63489) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 573.800063] env[63489]: DEBUG nova.compute.manager [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Unplugging VIFs for instance {{(pid=63489) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 573.800063] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquiring lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.800063] env[63489]: DEBUG oslo_concurrency.lockutils [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] Acquired lock "refresh_cache-81e42a2c-fb30-42e2-a2a9-45f3184739e4" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.800337] env[63489]: DEBUG nova.network.neutron [None req-d15f7b01-2386-4212-b9e6-3188557d2c0f tempest-ImagesNegativeTestJSON-1022960513 tempest-ImagesNegativeTestJSON-1022960513-project-member] [instance: 81e42a2c-fb30-42e2-a2a9-45f3184739e4] Building network info cache for instance {{(pid=63489) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 573.898490] env[63489]: DEBUG oslo_concurrency.lockutils [None req-4a7217a7-e792-4de2-accd-f27068b98e41 tempest-AttachInterfacesV270Test-564248798 tempest-AttachInterfacesV270Test-564248798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63489) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.974637] env[63489]: DEBUG nova.compute.manager [req-39fb824c-6d2b-4e7b-b548-2d712c0f76d8 req-5f8eba91-eb09-458a-bfb3-d2f761f65b40 service nova] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Received event network-changed-9ff2fdbd-5781-4119-80af-96b56677dbcb {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.974830] env[63489]: DEBUG nova.compute.manager [req-39fb824c-6d2b-4e7b-b548-2d712c0f76d8 req-5f8eba91-eb09-458a-bfb3-d2f761f65b40 service nova] [instance: 1c440617-c0ec-485b-a2cc-cd0c8a9d60df] Refreshing instance network info cache due to event network-changed-9ff2fdbd-5781-4119-80af-96b56677dbcb. {{(pid=63489) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 573.975049] env[63489]: DEBUG oslo_concurrency.lockutils [req-39fb824c-6d2b-4e7b-b548-2d712c0f76d8 req-5f8eba91-eb09-458a-bfb3-d2f761f65b40 service nova] Acquiring lock "refresh_cache-1c440617-c0ec-485b-a2cc-cd0c8a9d60df" {{(pid=63489) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.975222] env[63489]: DEBUG oslo_concurrency.lockutils [req-39fb824c-6d2b-4e7b-b548-2d712c0f76d8 req-5f8eba91-eb09-458a-bfb3-d2f761f65b40 service nova] Acquired lock "refresh_cache-1c440617-c0ec-485b-a2cc-cd0c8a9d60df" {{(pid=63489) lock /opt/stack/data/venv/li
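The closing entries show the event-driven side of the same machinery: Neutron delivers network-changed-9ff2fdbd-5781-4119-80af-96b56677dbcb as an external instance event, and the compute manager responds by refreshing that instance's network info cache, serialized under the per-instance "refresh_cache-<uuid>" lock seen throughout this section. A minimal, self-contained sketch of that handler shape follows; the function names and cache structure are illustrative.

import threading
from collections import defaultdict

# One lock per cache key, e.g. "refresh_cache-<instance uuid>".
_cache_locks = defaultdict(threading.Lock)
_nw_info_cache = {}


def get_instance_nw_info(instance_uuid):
    """Stand-in for rebuilding the cache from Neutron's current port data."""
    return [{"port_id": "9ff2fdbd-5781-4119-80af-96b56677dbcb", "status": "ACTIVE"}]


def handle_external_event(event_name, instance_uuid):
    # Only network-changed events trigger a cache refresh in this sketch.
    if not event_name.startswith("network-changed"):
        return
    lock_name = f"refresh_cache-{instance_uuid}"
    with _cache_locks[lock_name]:  # serialize with other writers of this cache
        _nw_info_cache[instance_uuid] = get_instance_nw_info(instance_uuid)
        print(f"Refreshed network info cache for {instance_uuid}")


handle_external_event("network-changed-9ff2fdbd-5781-4119-80af-96b56677dbcb",
                      "1c440617-c0ec-485b-a2cc-cd0c8a9d60df")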