[ 458.249820] env[61905]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61905) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 458.250202] env[61905]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61905) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 458.250202] env[61905]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61905) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 458.250603] env[61905]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 458.350905] env[61905]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61905) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 458.361060] env[61905]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61905) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 458.967303] env[61905]: INFO nova.virt.driver [None req-3fcb7b3a-6667-4f4c-b0ef-00eec676b7e0 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 459.039210] env[61905]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 459.039395] env[61905]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 459.039535] env[61905]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61905) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 462.074643] env[61905]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-34da0a5a-ee8a-4e3c-8658-345b3a9d9f4f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.092177] env[61905]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61905) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 462.092177] env[61905]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2c9d1067-2971-40f2-8371-dc41e51de2bf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.117856] env[61905]: INFO oslo_vmware.api [-] Successfully established new session; session ID is fbf03.
[ 462.118033] env[61905]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.079s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 462.118690] env[61905]: INFO nova.virt.vmwareapi.driver [None req-3fcb7b3a-6667-4f4c-b0ef-00eec676b7e0 None None] VMware vCenter version: 7.0.3
[ 462.122394] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85731f70-46fb-4f79-8595-4389937a3cac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.144914] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560362cd-b210-4ab4-be0b-d31d3c586904 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.151658] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9219c4d6-8f12-4551-91a9-0eca281553f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.159210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32b4fb3-701f-4063-8155-58ab626a4c10 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.174264] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789ef8d6-840f-4b33-802a-c18a34307777 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.180972] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b23fcc-856d-487a-91df-630368b455e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.213473] env[61905]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-76a7465e-a2c3-4ef7-992b-80d3ebd3dc7c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 462.219546] env[61905]: DEBUG nova.virt.vmwareapi.driver [None req-3fcb7b3a-6667-4f4c-b0ef-00eec676b7e0 None None] Extension org.openstack.compute already exists. {{(pid=61905) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 462.222745] env[61905]: INFO nova.compute.provider_config [None req-3fcb7b3a-6667-4f4c-b0ef-00eec676b7e0 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 462.726069] env[61905]: DEBUG nova.context [None req-3fcb7b3a-6667-4f4c-b0ef-00eec676b7e0 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),c3077b73-4bec-4516-9202-0aa7e0b7fd4e(cell1) {{(pid=61905) load_cells /opt/stack/nova/nova/context.py:464}}
[ 462.728221] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 462.728471] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 462.729165] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 462.729590] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Acquiring lock "c3077b73-4bec-4516-9202-0aa7e0b7fd4e" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 462.729805] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Lock "c3077b73-4bec-4516-9202-0aa7e0b7fd4e" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 462.730821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Lock "c3077b73-4bec-4516-9202-0aa7e0b7fd4e" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 462.751126] env[61905]: INFO dbcounter [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Registered counter for database nova_cell0
[ 462.759411] env[61905]: INFO dbcounter [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Registered counter for database nova_cell1
[ 462.762442] env[61905]: DEBUG oslo_db.sqlalchemy.engines [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61905) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 462.762812] env[61905]: DEBUG oslo_db.sqlalchemy.engines [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61905) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 462.767827] env[61905]: ERROR nova.db.main.api [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 462.767827] env[61905]: result = function(*args, **kwargs)
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 462.767827] env[61905]: return func(*args, **kwargs)
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 462.767827] env[61905]: result = fn(*args, **kwargs)
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 462.767827] env[61905]: return f(*args, **kwargs)
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 462.767827] env[61905]: return db.service_get_minimum_version(context, binaries)
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 462.767827] env[61905]: _check_db_access()
[ 462.767827] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 462.767827] env[61905]: stacktrace = ''.join(traceback.format_stack())
[ 462.767827] env[61905]:
[ 462.768644] env[61905]: ERROR nova.db.main.api [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 462.768644] env[61905]: result = function(*args, **kwargs)
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 462.768644] env[61905]: return func(*args, **kwargs)
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 462.768644] env[61905]: result = fn(*args, **kwargs)
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 462.768644] env[61905]: return f(*args, **kwargs)
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 462.768644] env[61905]: return db.service_get_minimum_version(context, binaries)
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 462.768644] env[61905]: _check_db_access()
[ 462.768644] env[61905]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 462.768644] env[61905]: stacktrace = ''.join(traceback.format_stack())
[ 462.768644] env[61905]:
[ 462.769231] env[61905]: WARNING nova.objects.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Failed to get minimum service version for cell c3077b73-4bec-4516-9202-0aa7e0b7fd4e
[ 462.769231] env[61905]: WARNING nova.objects.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 462.769576] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Acquiring lock "singleton_lock" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 462.769736] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Acquired lock "singleton_lock" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 462.769979] env[61905]: DEBUG oslo_concurrency.lockutils [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Releasing lock "singleton_lock" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 462.770300] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Full set of CONF: {{(pid=61905) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 462.770442] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ******************************************************************************** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 462.770566] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Configuration options gathered from: {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 462.770701] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 462.770890] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 462.771025] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ================================================================================ {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 462.771231] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] allow_resize_to_same_host = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.771400] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] arq_binding_timeout = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.771530] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] backdoor_port = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.771656] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] backdoor_socket = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.771816] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] block_device_allocate_retries = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.771977] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] block_device_allocate_retries_interval = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772157] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cert = self.pem {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772321] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772489] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute_monitors = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772660] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] config_dir = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772827] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] config_drive_format = iso9660 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.772966] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773154] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] config_source = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773322] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] console_host = devstack {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773484] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] control_exchange = nova {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773644] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cpu_allocation_ratio = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773805] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] daemon = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.773974] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] debug = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.774146] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_access_ip_network_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.774315] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_availability_zone = nova {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.774470] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_ephemeral_format = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.774628] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_green_pool_size = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.774917] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775095] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] default_schedule_zone = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775261] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] disk_allocation_ratio = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775425] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] enable_new_services = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775700] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] enabled_apis = ['osapi_compute'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775783] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] enabled_ssl_apis = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.775954] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] flat_injected = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.776132] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] force_config_drive = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.776293] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] force_raw_images = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.776462] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] graceful_shutdown_timeout = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.776622] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] heal_instance_info_cache_interval = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.776833] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] host = cpu-1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777047] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777244] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777410] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777623] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777787] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_build_timeout = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.777951] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_delete_interval = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778133] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_format = [instance: %(uuid)s] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778304] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_name_template = instance-%08x {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_usage_audit = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778638] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_usage_audit_period = month {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778802] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.778970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] internal_service_availability_zone = internal {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779308] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] key = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779470] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] live_migration_retry_count = 30 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779638] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_color = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779802] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_config_append = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.779976] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780148] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_dir = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780310] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780440] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_options = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780602] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_rotate_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780771] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_rotate_interval_type = days {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.780939] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] log_rotation_type = none {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781085] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781214] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781383] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781549] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781676] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.781860] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] long_rpc_timeout = 1800 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782000] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_concurrent_builds = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782174] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_concurrent_live_migrations = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782331] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_concurrent_snapshots = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782488] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_local_block_devices = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782645] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_logfile_count = 30 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782805] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] max_logfile_size_mb = 200 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.782963] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] maximum_instance_delete_attempts = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.783141] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metadata_listen = 0.0.0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.783310] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metadata_listen_port = 8775 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.783476] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metadata_workers = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.783637] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] migrate_max_retries = -1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.783801] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] mkisofs_cmd = genisoimage {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784013] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784152] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] my_ip = 10.180.1.21 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784316] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] network_allocate_retries = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784496] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784667] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.784850] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] osapi_compute_listen_port = 8774 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785030] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] osapi_compute_unique_server_name_scope = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785204] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] osapi_compute_workers = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785367] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] password_length = 12 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785527] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] periodic_enable = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785688] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] periodic_fuzzy_delay = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.785851] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] pointer_model = usbtablet {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786022] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] preallocate_images = none {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786182] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] publish_errors = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786310] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] pybasedir = /opt/stack/nova {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786466] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ram_allocation_ratio = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786625] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rate_limit_burst = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786792] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rate_limit_except_level = CRITICAL {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.786951] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rate_limit_interval = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787146] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reboot_timeout = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787312] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reclaim_instance_interval = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] record = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787635] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reimage_timeout_per_gb = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787799] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] report_interval = 120 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.787964] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rescue_timeout = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788138] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reserved_host_cpus = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788298] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reserved_host_disk_mb = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788458] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reserved_host_memory_mb = 512 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788616] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] reserved_huge_pages = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788776] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] resize_confirm_window = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.788935] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] resize_fs_using_block_device = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789106] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] resume_guests_state_on_host_boot = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789276] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789440] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] rpc_response_timeout = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789598] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] run_external_periodic_tasks = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789764] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] running_deleted_instance_action = reap {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.789926] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790096] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] running_deleted_instance_timeout = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790258] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler_instance_sync_interval = 120 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790427] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_down_time = 720 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790594] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] servicegroup_driver = db {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790752] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] shell_completion = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.790913] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] shelved_offload_time = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791083] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] shelved_poll_interval = 3600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791254] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] shutdown_timeout = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791414] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] source_is_ipv6 = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791574] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ssl_only = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791814] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.791986] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] sync_power_state_interval = 600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792164] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] sync_power_state_pool_size = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792333] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] syslog_log_facility = LOG_USER {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792490] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] tempdir = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792652] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] timeout_nbd = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792816] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] transport_url = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.792977] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] update_resources_interval = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_cow_images = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793311] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_eventlog = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793469] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_journal = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793627] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_json = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793785] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_rootwrap_daemon = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.793943] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_stderr = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794111] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] use_syslog = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794268] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vcpu_pin_set = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794432] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plugging_is_fatal = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794596] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plugging_timeout = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794793] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] virt_mkfs = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.794966] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] volume_usage_poll_interval = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.795141] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] watch_log_file = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.795313] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] web = /usr/share/spice-html5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 462.795497] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.795685] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.795907] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.796047] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_concurrency.disable_process_locking = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.796338] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.796525] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.796693] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.796867] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797064] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797248] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797433] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.auth_strategy = keystone {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797603] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.compute_link_prefix = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797781] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.797957] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.dhcp_domain = novalocal {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.enable_instance_password = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798304] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.glance_link_prefix = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798473] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798644] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798811] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.instance_list_per_project_cells = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.798974] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.list_records_by_skipping_down_cells = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.799175] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.local_metadata_per_cell = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.799355] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.max_limit = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.799526] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.metadata_cache_expiration = 15 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.799701] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.neutron_default_tenant_id = default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.799873] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.response_validation = warn {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.800053] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.use_neutron_default_nets = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.800228] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.800390] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
[ 462.800557] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}}
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.800727] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.800903] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_dynamic_targets = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801078] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_jsonfile_path = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801264] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801460] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.backend = dogpile.cache.memcached {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801633] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.backend_argument = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801811] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.config_prefix = cache.oslo {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.801981] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.dead_timeout = 60.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802160] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.debug_cache_backend = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802325] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.enable_retry_client = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802488] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.enable_socket_keepalive = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802658] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.enabled = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802823] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.enforce_fips_mode = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.802989] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.expiration_time = 600 {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803165] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.hashclient_retry_attempts = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803334] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803499] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_dead_retry = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803656] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_password = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803822] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.803992] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.804174] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_pool_maxsize = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.804338] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.804502] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_sasl_enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.804697] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.804882] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805056] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.memcache_username = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805543] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.proxies = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805543] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_db = 0 {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805543] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_password = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805728] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.805923] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806112] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_server = localhost:6379 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806280] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_socket_timeout = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806439] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.redis_username = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806602] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.retry_attempts = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806766] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.retry_delay = 0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.806930] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.socket_keepalive_count = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.807134] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.socket_keepalive_idle = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.807308] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.socket_keepalive_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.807468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.tls_allowed_ciphers = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.807625] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.tls_cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.807782] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.tls_certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
462.807945] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.tls_enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808114] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cache.tls_keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808288] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808467] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.auth_type = password {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808631] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808808] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.808972] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809316] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.cross_az_attach = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809504] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.debug = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809668] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.endpoint_template = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809836] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.http_retries = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.809998] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.810173] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.810344] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.os_region_name = RegionOne 
{{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.810511] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.810672] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cinder.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.810847] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811014] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.cpu_dedicated_set = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811178] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.cpu_shared_set = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811346] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.image_type_exclude_list = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811510] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811692] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.811861] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812033] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812207] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812371] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.resource_provider_association_refresh = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812533] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812699] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.shutdown_retry_interval = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.812892] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813086] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] conductor.workers = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813274] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] console.allowed_origins = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813454] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] console.ssl_ciphers = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813626] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] console.ssl_minimum_version = default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813798] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] consoleauth.enforce_session_timeout = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.813969] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] consoleauth.token_ttl = 600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814158] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814317] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814481] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814639] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814824] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.814990] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815167] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] 
cyborg.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815327] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815487] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815646] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815831] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.region_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.815995] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816169] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816338] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.service_type = accelerator {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816498] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816713] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816815] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.816974] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.817170] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.817335] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] cyborg.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.817514] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.backend = sqlalchemy {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.817686] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.connection = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.817905] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.connection_debug = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818131] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.connection_parameters = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818312] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.connection_recycle_time = 3600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818477] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.connection_trace = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818640] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.db_inc_retry_interval = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818807] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.db_max_retries = 20 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.818971] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.db_max_retry_interval = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819149] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.db_retry_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819314] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.max_overflow = 50 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819475] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.max_pool_size = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819636] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.max_retries = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819805] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.819967] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.mysql_wsrep_sync_wait = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
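
Every record in this dump is produced by one oslo.config call, ConfigOpts.log_opt_values(), which each record names in its trailer (log_opt_values .../oslo_config/cfg.py:2826): at startup the service walks every registered option group and logs one "group.option = value" line at DEBUG level, and options registered with secret=True (for example database.connection and cache.memcache_password above) are masked as ****. Below is a minimal runnable sketch assuming only that oslo.config is installed; the "demo" group and its options are hypothetical stand-ins, not Nova's real options, and the {{(pid=...) ...}} trailer is added by oslo.log's formatter rather than by this snippet.

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF
    # Hypothetical options, registered the same way a service registers its own.
    CONF.register_opts(
        [
            cfg.StrOpt('dhcp_domain', default='novalocal'),
            cfg.StrOpt('password', default='s3cret', secret=True),  # dumped as ****
        ],
        group='demo',
    )

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('oslo_service.service')

    CONF([])                                 # parse an (empty) command line
    CONF.log_opt_values(LOG, logging.DEBUG)  # one 'demo.option = value' record per option

The option-value dump continues:
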
[ 462.820138] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.pool_timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.820301] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.retry_interval = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.820456] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.slave_connection = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.820615] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.sqlite_synchronous = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.820774] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] database.use_db_reconnect = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.820950] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.backend = sqlalchemy {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821131] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.connection = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821298] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.connection_debug = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821467] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.connection_parameters = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821628] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.connection_recycle_time = 3600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821788] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.connection_trace = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.821959] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.db_inc_retry_interval = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822149] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.db_max_retries = 20 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822314] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.db_max_retry_interval = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822477] env[61905]: DEBUG 
oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.db_retry_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822638] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.max_overflow = 50 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822798] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.max_pool_size = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.822961] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.max_retries = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823145] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823306] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823465] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.pool_timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823626] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.retry_interval = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823784] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.slave_connection = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.823946] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] api_database.sqlite_synchronous = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.824137] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] devices.enabled_mdev_types = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.824314] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.824485] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.824647] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ephemeral_storage_encryption.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.824847] env[61905]: 
DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825035] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.api_servers = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825206] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825369] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825534] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825717] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.825897] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826085] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.debug = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826262] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.default_trusted_certificate_ids = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826426] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.enable_certificate_validation = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826589] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.enable_rbd_download = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826749] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.826916] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827096] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827262] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] 
glance.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827421] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827585] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.num_retries = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827757] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.rbd_ceph_conf = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.827923] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.rbd_connect_timeout = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.828104] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.rbd_pool = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.828335] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.rbd_user = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.828512] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.region_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.828678] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.828840] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829018] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.service_type = image {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829190] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829358] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829519] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829681] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.829863] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.830040] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.verify_glance_signatures = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.830204] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] glance.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.830371] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] guestfs.debug = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.830535] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] mks.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.830891] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831094] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.manager_interval = 2400 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831269] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.precache_concurrency = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831441] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.remove_unused_base_images = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831612] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831783] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.831959] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] image_cache.subdirectory_name = _base {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.832149] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.api_max_retries = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.832317] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.api_retry_interval = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
462.832479] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.832640] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.auth_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.832799] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.832958] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833133] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833298] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.conductor_group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833459] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833618] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833778] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.833943] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834115] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834278] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834436] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834602] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.peer_list = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834785] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.region_name = None {{(pid=61905) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.834954] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835134] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.serial_console_state_timeout = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835297] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.service_type = baremetal {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835628] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.shard = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835792] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.835952] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836126] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836288] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836628] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ironic.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836810] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.836987] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] key_manager.fixed_key = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.837186] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.837351] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.barbican_api_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.837511] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.barbican_endpoint = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.837680] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.barbican_endpoint_type = public {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.837840] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.barbican_region_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838009] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838175] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838388] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838583] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838750] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.838920] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.number_of_retries = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839095] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.retry_delay = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839264] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.send_service_user_token = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839429] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839590] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839754] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.verify_ssl = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.839915] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican.verify_ssl_path = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840093] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840261] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.auth_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840420] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840578] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840740] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.840900] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841066] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841234] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841391] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] barbican_service_user.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841557] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.approle_role_id = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841717] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.approle_secret_id = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.841888] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.kv_mountpoint = secret {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842064] env[61905]: DEBUG 
oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.kv_path = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842236] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.kv_version = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842397] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.namespace = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842555] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.root_token_id = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842716] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.ssl_ca_crt_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.842883] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.timeout = 60.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843054] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.use_ssl = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843227] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843401] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843563] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.auth_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843721] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.843883] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844054] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844217] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844376] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.connect_retry_delay = None {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844532] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844708] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.844877] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845046] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845209] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845365] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.region_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845523] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845679] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.845848] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.service_type = identity {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.846014] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.846182] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.846345] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.846503] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.846685] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
462.846845] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] keystone.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847067] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.connection_uri = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847218] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_mode = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847384] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847554] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_models = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847725] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_power_governor_high = performance {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.847895] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848070] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_power_management = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848244] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848414] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.device_detach_attempts = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848644] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.device_detach_timeout = 20 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848830] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.disk_cachemodes = {} {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.848998] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.disk_prefix = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.849185] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.enabled_perf_events = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.849354] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.file_backed_memory = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.849521] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.gid_maps = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.849682] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.hw_disk_discard = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.849843] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.hw_machine_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850026] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_rbd_ceph_conf = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850203] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850369] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850539] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_rbd_glance_store_name = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850709] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_rbd_pool = rbd {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.850881] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_type = default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851050] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.images_volume_group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851218] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.inject_key = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851383] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.inject_partition = -2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851544] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.inject_password = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851707] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] 
libvirt.iscsi_iface = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.851870] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.iser_use_multipath = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852045] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852214] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852377] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_downtime = 500 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852540] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852706] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.852866] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_inbound_addr = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853037] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853208] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853370] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_scheme = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853543] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_timeout_action = abort {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853705] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_tunnelled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.853866] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_uri = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.854038] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.live_migration_with_native_tls = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.854202] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.max_queues = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.854366] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.854603] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.854795] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.nfs_mount_options = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855105] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855287] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855465] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855630] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855797] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.855963] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_pcie_ports = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.856146] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.856317] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.pmem_namespaces = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.856481] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.quobyte_client_cfg = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.856764] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.856939] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857175] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857312] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857475] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rbd_secret_uuid = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857637] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rbd_user = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857804] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.857981] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.858157] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rescue_image_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.858318] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rescue_kernel_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.858475] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rescue_ramdisk_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.858689] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.858885] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.rx_queue_size = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.859071] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.smbfs_mount_options = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.859355] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.859532] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.snapshot_compression = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.859698] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.snapshot_image_format = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.859919] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860097] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.sparse_logical_volumes = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860265] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.swtpm_enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860437] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.swtpm_group = tss {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860608] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.swtpm_user = tss {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860780] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.sysinfo_serial = unique {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.860944] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.tb_cache_size = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.861116] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.tx_queue_size = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.861311] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.uid_maps = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.861492] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.use_virtio_for_bridges = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.861667] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.virt_type = kvm {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.861836] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.volume_clear = zero {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862007] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.volume_clear_size = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862184] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.volume_use_multipath = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862345] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_cache_path = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862515] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862686] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.862853] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.863029] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.863310] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.863487] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.vzstorage_mount_user = stack {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.863654] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.863831] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864015] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.auth_type = password {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864189] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864349] env[61905]: DEBUG oslo_service.service 
[None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864512] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864681] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.864867] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865052] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.default_floating_pool = public {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865214] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865377] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.extension_sync_interval = 600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865539] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.http_retries = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865727] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.865912] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866088] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866264] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866423] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866590] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.ovs_bridge = br-int {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866761] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.physnets = [] {{(pid=61905) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.866928] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.region_name = RegionOne {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867122] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867300] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.service_metadata_proxy = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867460] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867626] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.service_type = network {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867786] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.867946] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868116] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868277] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868457] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868618] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] neutron.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868788] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] notifications.bdms_in_notifications = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.868963] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] notifications.default_level = INFO {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.869151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] notifications.notification_format = unversioned {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.869318] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] notifications.notify_on_state_change = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.869494] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.869670] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] pci.alias = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.869841] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] pci.device_spec = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870010] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] pci.report_in_placement = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870195] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870366] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.auth_type = password {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870532] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870704] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.870867] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871038] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871202] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871360] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871517] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.default_domain_id = None {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871672] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.default_domain_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871830] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.domain_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.871984] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.domain_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872151] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872311] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872464] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872616] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872772] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.872934] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.password = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873100] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.project_domain_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873265] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.project_domain_name = Default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873433] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.project_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873603] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.project_name = service {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873769] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.region_name = RegionOne {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.873933] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874098] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874268] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.service_type = placement {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874429] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874584] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874771] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.874950] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.system_scope = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875114] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875271] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.trust_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875427] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.user_domain_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875592] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.user_domain_name = Default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875768] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.user_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.875949] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.username = nova {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876143] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876304] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] placement.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876480] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.cores = 20 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876646] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.count_usage_from_placement = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876816] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.876994] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.injected_file_content_bytes = 10240 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.877175] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.injected_file_path_length = 255 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.877391] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.injected_files = 5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.877506] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.instances = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.877672] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.key_pairs = 100 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.877840] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.metadata_items = 128 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878010] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.ram = 51200 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878182] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.recheck_quota = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878349] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.server_group_members = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878512] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] quota.server_groups = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878683] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.878861] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879029] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.image_metadata_prefilter = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879196] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879358] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.max_attempts = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879520] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.max_placement_results = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879681] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.879843] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880013] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880188] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] scheduler.workers = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880363] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880533] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880711] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.880881] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881060] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881226] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881389] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881578] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881747] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.host_subset_size = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.881911] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882081] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882246] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882410] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.isolated_hosts = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882572] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.isolated_images = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882748] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.882914] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883089] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883254] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.pci_in_placement = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883416] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883575] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883734] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.883895] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884066] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884231] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884392] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.track_instance_changes = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884569] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884759] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metrics.required = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.884932] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metrics.weight_multiplier = 1.0 
{{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.885112] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.885282] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] metrics.weight_setting = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.885593] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.885789] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.885974] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.port_range = 10000:20000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.886162] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.886333] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.886502] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] serial_console.serialproxy_port = 6083 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.886672] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.886888] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.auth_type = password {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887011] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887177] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887339] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887502] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.insecure = False {{(pid=61905) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887652] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887821] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.send_service_user_token = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.887985] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.888171] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] service_user.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.888342] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.agent_enabled = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.888506] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.888811] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889019] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889187] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.html5proxy_port = 6082 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889347] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.image_compression = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889503] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.jpeg_compression = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889660] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.playback_compression = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889819] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.require_secure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.889987] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.server_listen = 127.0.0.1 {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890171] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890331] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.streaming_mode = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890487] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] spice.zlib_compression = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890656] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] upgrade_levels.baseapi = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890818] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] upgrade_levels.compute = auto {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.890976] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] upgrade_levels.conductor = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891145] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] upgrade_levels.scheduler = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891312] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891471] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891628] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891786] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.891949] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892121] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892281] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892441] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892596] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vendordata_dynamic_auth.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892769] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.api_retry_count = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.892928] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.ca_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893109] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893281] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.cluster_name = testcl1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893446] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.connection_pool_size = 10 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893604] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.console_delay_seconds = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893770] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.datastore_regex = ^datastore.* {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.893970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.894156] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.host_password = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.894323] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.host_port = 443 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.894496] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.host_username = administrator@vsphere.local {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.894678] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.insecure = True {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.894867] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.integration_bridge = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895047] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.maximum_objects = 100 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895212] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.pbm_default_policy = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895376] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.pbm_enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895535] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.pbm_wsdl_location = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895723] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.895952] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.serial_port_proxy_uri = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.896196] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.serial_port_service_uri = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.896433] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.task_poll_interval = 0.5 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.896676] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.use_linked_clone = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.896922] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.vnc_keymap = en-us {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.897177] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.vnc_port = 5900 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.897412] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vmware.vnc_port_total = 10000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.897675] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.auth_schemes = ['none'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.897926] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.898342] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.898603] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.898851] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.novncproxy_port = 6080 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.899116] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.server_listen = 127.0.0.1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.899369] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.899600] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.vencrypt_ca_certs = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.899829] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.vencrypt_client_cert = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.900070] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vnc.vencrypt_client_key = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.900326] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.900560] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_deep_image_inspection = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.900790] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.901034] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.901268] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.901500] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.disable_rootwrap = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.901731] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.enable_numa_live_migration = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.901988] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.902233] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.902464] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.902694] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.libvirt_disable_apic = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.902923] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.903177] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.903412] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.903642] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.903870] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.904110] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.904342] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.904573] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.904811] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.905063] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.905329] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.905574] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.client_socket_timeout = 900 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.905818] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.default_pool_size = 1000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.906066] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.keep_alive = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.906309] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.max_header_line = 16384 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.906542] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.906771] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.ssl_ca_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.907016] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.ssl_cert_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.907250] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.ssl_key_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.907486] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.tcp_keepidle = 600 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.907740] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.907980] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] zvm.ca_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.908223] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] zvm.cloud_connector_url = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.908615] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.908861] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] zvm.reachable_timeout = 300 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.909129] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.enforce_new_defaults = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.909677] env[61905]: WARNING oslo_config.cfg [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 462.909939] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.enforce_scope = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.910229] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.policy_default_rule = default {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.910492] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.910773] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.policy_file = policy.yaml {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.911039] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.911278] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.911508] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.911737] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.911970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.912228] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.912475] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.912731] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.connection_string = messaging:// {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.912971] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.enabled = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.913230] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.es_doc_type = notification 
{{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.913467] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.es_scroll_size = 10000 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.913710] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.es_scroll_time = 2m {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.913949] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.filter_error_trace = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.914231] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.hmac_keys = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.914473] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.sentinel_service_name = mymaster {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.914722] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.socket_timeout = 0.1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.914953] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.trace_requests = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.915198] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler.trace_sqlalchemy = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.915451] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler_jaeger.process_tags = {} {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.915686] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler_jaeger.service_name_prefix = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.915917] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] profiler_otlp.service_name_prefix = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.916163] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] remote_debug.host = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.916407] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] remote_debug.port = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.916663] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.916897] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.917147] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.917387] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.917617] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.917849] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.918108] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.918349] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.918581] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.918824] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.919073] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.919343] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.919581] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.919820] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.920076] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.920319] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.920552] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.920798] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.921044] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.921281] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.921518] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.921754] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.922006] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.922266] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.922500] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.922739] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.922977] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.923224] env[61905]: DEBUG oslo_service.service [None 
req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.923466] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.923705] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.923953] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.924215] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.924445] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.924705] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.924948] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.925198] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.925462] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.925703] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_notifications.retry = -1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.925970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.926244] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.926494] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.auth_section = None {{(pid=61905) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.926764] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.auth_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.927043] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.cafile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.927296] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.certfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.927537] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.collect_timing = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.927770] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.connect_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.928014] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.connect_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.928261] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.endpoint_id = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.928490] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.endpoint_override = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.928723] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.insecure = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.928949] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.keyfile = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.929187] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.max_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.929416] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.min_version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.929645] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.region_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.929882] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.retriable_status_codes = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.930133] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.service_name = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.930370] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.service_type = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.930610] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.split_loggers = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.930841] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.status_code_retries = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.931090] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.status_code_retry_delay = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.931326] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.timeout = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.931558] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.valid_interfaces = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.931785] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_limit.version = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.932038] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_reports.file_event_handler = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.932281] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.932510] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] oslo_reports.log_dir = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.932768] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.933013] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.933255] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.933493] 
env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.933734] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.933970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.934233] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.934464] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.934714] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.934943] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.935195] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.935422] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] vif_plug_ovs_privileged.user = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.935665] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.935925] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.936204] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.936460] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.936705] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.936950] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.937203] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.937442] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.937701] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.937958] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.isolate_vif = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.938220] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.938469] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.938722] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.938980] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.939239] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] os_vif_ovs.per_port_bridge = False {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.939495] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] privsep_osbrick.capabilities = [21] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.939733] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] privsep_osbrick.group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.939970] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] privsep_osbrick.helper_command = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.940229] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.940468] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.940694] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] privsep_osbrick.user = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.940945] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.941208] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.group = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.941442] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.helper_command = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.941680] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.941918] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.942155] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] nova_sys_admin.user = None {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 462.942356] env[61905]: DEBUG oslo_service.service [None req-680d8c4a-11ea-49c5-86b9-3586e4caf8c5 None None] ******************************************************************************** {{(pid=61905) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 462.943038] env[61905]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 463.447199] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Getting list of instances from cluster (obj){ [ 463.447199] env[61905]: value = "domain-c8" [ 463.447199] env[61905]: _type = "ClusterComputeResource" [ 463.447199] env[61905]: } {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 463.448354] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e53ae1f-c9c2-4bd2-873f-45a783817bf7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 463.457434] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Got total of 0 instances {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 463.457979] env[61905]: WARNING nova.virt.vmwareapi.driver [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 463.458459] env[61905]: INFO nova.virt.node [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Generated node identity 9cb855ec-212a-457a-a4ff-55e9d97323b7 [ 463.458693] env[61905]: INFO nova.virt.node [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Wrote node identity 9cb855ec-212a-457a-a4ff-55e9d97323b7 to /opt/stack/data/n-cpu-1/compute_id [ 463.961813] env[61905]: WARNING nova.compute.manager [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Compute nodes ['9cb855ec-212a-457a-a4ff-55e9d97323b7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 464.968422] env[61905]: INFO nova.compute.manager [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 465.977033] env[61905]: WARNING nova.compute.manager [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 465.977033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 465.977033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 465.977033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 465.977033] env[61905]: DEBUG nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 465.977033] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bc990f-67a6-4289-988a-19fd0b6dbb52 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 465.985824] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99d7a59-0c7b-44f9-ade8-b45e997c3a79 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 465.999482] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9d147232-47ff-400a-aeea-3bde2efc705b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.005832] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a796d301-c42e-4078-9a6d-f7d9e8cfc3b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 466.034655] env[61905]: DEBUG nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181482MB free_disk=150GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 466.035069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 466.036309] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 466.539842] env[61905]: WARNING nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] No compute node record for cpu-1:9cb855ec-212a-457a-a4ff-55e9d97323b7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 9cb855ec-212a-457a-a4ff-55e9d97323b7 could not be found. [ 467.042540] env[61905]: INFO nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 9cb855ec-212a-457a-a4ff-55e9d97323b7 [ 468.552044] env[61905]: DEBUG nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 468.552044] env[61905]: DEBUG nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 468.701698] env[61905]: INFO nova.scheduler.client.report [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] [req-5f01b67a-f680-4def-9c58-e6528d90beb6] Created resource provider record via placement API for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
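The provider record created above carries the node's full Placement inventory: for each resource class a `total`, `reserved`, `max_unit`, `step_size`, and `allocation_ratio`. Placement derives schedulable capacity as (total - reserved) * allocation_ratio, while `max_unit` caps what any single allocation (one instance) may claim. A minimal sketch (plain Python, not Nova or Placement code) reproducing the capacities implied by this log:

```python
# Minimal sketch (not Nova code): how a Placement-style inventory record
# translates into schedulable capacity. Values copied from the provider
# record logged above for 9cb855ec-212a-457a-a4ff-55e9d97323b7.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 150},
}

for rc, inv in inventory.items():
    # Effective capacity: (total - reserved) * allocation_ratio.
    # max_unit limits how much one instance may request of this class.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:.0f} schedulable, max {inv['max_unit']} per instance")

# VCPU: 192 schedulable, max 16 per instance
# MEMORY_MB: 196078 schedulable, max 65530 per instance
# DISK_GB: 400 schedulable, max 150 per instance
```

This is why the 48 physical vCPUs reported in the hypervisor resource view show up as 192 schedulable VCPUs under the 4.0 allocation ratio, while any single flavor remains limited to 16.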
[ 468.720080] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96246fdb-1f7e-4418-890d-c5a15693a129 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.725969] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b94a016-fc77-4bc2-93e2-14f488d9abea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.754848] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3216edcc-8e1e-4403-88ba-d9fe9431410d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.761680] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52fc594-0ce0-4be3-aa01-4339a5d90e1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 468.774243] env[61905]: DEBUG nova.compute.provider_tree [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 469.311422] env[61905]: DEBUG nova.scheduler.client.report [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Updated inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 469.311422] env[61905]: DEBUG nova.compute.provider_tree [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Updating resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 generation from 0 to 1 during operation: update_inventory {{(pid=61905) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 469.311422] env[61905]: DEBUG nova.compute.provider_tree [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 469.362440] env[61905]: DEBUG nova.compute.provider_tree [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Updating 
resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 generation from 1 to 2 during operation: update_traits {{(pid=61905) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 469.869897] env[61905]: DEBUG nova.compute.resource_tracker [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 469.869897] env[61905]: DEBUG oslo_concurrency.lockutils [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.832s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 469.869897] env[61905]: DEBUG nova.service [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Creating RPC server for service compute {{(pid=61905) start /opt/stack/nova/nova/service.py:186}} [ 469.881496] env[61905]: DEBUG nova.service [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] Join ServiceGroup membership for this service compute {{(pid=61905) start /opt/stack/nova/nova/service.py:203}} [ 469.881916] env[61905]: DEBUG nova.servicegroup.drivers.db [None req-23cf634a-5d36-4956-87e9-a07e3212e854 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61905) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 484.885839] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_power_states {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 485.389521] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Getting list of instances from cluster (obj){ [ 485.389521] env[61905]: value = "domain-c8" [ 485.389521] env[61905]: _type = "ClusterComputeResource" [ 485.389521] env[61905]: } {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 485.390772] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79fa44c0-729c-42c2-a731-cf0031279b97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.399201] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Got total of 0 instances {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 485.399439] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 485.399736] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Getting list of instances from cluster (obj){ [ 485.399736] env[61905]: value = "domain-c8" [ 485.399736] env[61905]: _type = "ClusterComputeResource" [ 485.399736] env[61905]: } {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 485.400568] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4878cc1-a57c-46eb-9a5c-6a0bc18b33f9 
{{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 485.407671] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Got total of 0 instances {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 508.939282] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "9cacbcf0-63a4-4926-b007-07657b164e99" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.939282] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "9cacbcf0-63a4-4926-b007-07657b164e99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.443649] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 509.740144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquiring lock "aa565525-d5b0-4dc4-9f20-30542ee3e52f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.740144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "aa565525-d5b0-4dc4-9f20-30542ee3e52f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.996231] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.996527] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.000364] env[61905]: INFO nova.compute.claims [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc
tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 510.246453] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 510.409010] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquiring lock "33acde25-4310-4b16-bd9e-6ef8e27b49b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.409323] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "33acde25-4310-4b16-bd9e-6ef8e27b49b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.774198] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "c6698e84-05f7-4a92-809e-f48e0835a1d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.774198] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "c6698e84-05f7-4a92-809e-f48e0835a1d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.806586] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.914092] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Starting instance...
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 511.117415] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5d0faa-03e2-4283-b56c-96e566405ecf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.125572] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe738ea-b923-49ad-895e-2a3bfe5326e7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.158748] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdd6af2-1b5e-45ec-a003-a11f8a566c06 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.166726] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125596d9-a6ff-41d2-a0a9-f169d41a5be3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.184314] env[61905]: DEBUG nova.compute.provider_tree [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 511.277179] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 511.443380] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 511.690043] env[61905]: DEBUG nova.scheduler.client.report [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 511.815589] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.202266] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.202760] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 512.206560] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.401s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.207964] env[61905]: INFO nova.compute.claims [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.713528] env[61905]: DEBUG nova.compute.utils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 512.715000] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 512.728941] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 513.178134] env[61905]: DEBUG nova.policy [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f40d6d6fd374886a0f29824f37c2328', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a890d180b4d14864955629f63ec18850', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 513.243467] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 513.371764] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71884664-a472-4c9d-8d96-1bdd724dd847 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.382481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53822637-799e-464a-b70f-43a88c0c23ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.419090] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3669d1b-cd2b-4147-aa7d-ebff7a0871fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.427092] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d9eff0-c01a-4582-b1ae-4d58d052affb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.443615] env[61905]: DEBUG nova.compute.provider_tree [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 513.950718] env[61905]: DEBUG nova.scheduler.client.report [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 514.254517] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 514.283401] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 514.284796] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 514.284796] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 514.284796] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 514.285019] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 514.285136] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 514.285351] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 514.285561] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 514.286121] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 514.286381] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 514.286559] env[61905]: DEBUG nova.virt.hardware [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 514.288235] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb63c87b-6ae0-4c01-b535-dce78f5ed360 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.298989] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d69c46e-4c57-4153-a14f-12e5bd6ebf27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.319607] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fecaae3-6fc3-46a1-bf94-34ff6575b9f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.461064] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.463323] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 514.468411] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.024s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.470191] env[61905]: INFO nova.compute.claims [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 514.788510] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Successfully created port: 41b6fe4e-e287-4d73-b440-f4d6f49c1b57 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 514.977010] env[61905]: DEBUG nova.compute.utils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 514.986418] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 514.986418] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 515.323887] env[61905]: DEBUG nova.policy [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '599417cf5d4141e3aa4aff687bb5edb4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '740b5d5281be4d269a47c7af541c8fd8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 515.493341] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 515.596340] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45f90bf-dc2c-415d-96ea-efd893f5cac7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.604722] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292cd7af-43a9-46c0-9f11-38dd8ce0b76a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.639114] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3561afa4-b468-4709-90c4-8dc7cfadf0f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.645035] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6658c300-0075-49f6-b29a-bc64bff1dc24 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.660168] env[61905]: DEBUG nova.compute.provider_tree [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 516.164331] env[61905]: DEBUG nova.scheduler.client.report [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 516.357638] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Successfully created port: 4eef8260-8d90-4023-b4c1-d9cc57b224ac {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.511899] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 516.538916] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 516.538916] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 516.541422] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 516.541422] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 516.541422] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 516.541422] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 516.541422] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 516.541669] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 516.541669] env[61905]: DEBUG nova.virt.hardware [None 
req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 516.541669] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 516.541669] env[61905]: DEBUG nova.virt.hardware [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 516.542569] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e1f978-3460-4283-a915-cc20960cb007 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.553602] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcf5c58-6be1-4ffe-bcd2-af83235ee4ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.675137] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.676371] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 516.678346] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.863s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.679728] env[61905]: INFO nova.compute.claims [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.189219] env[61905]: DEBUG nova.compute.utils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 517.192467] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 517.196416] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 517.494077] env[61905]: DEBUG nova.policy [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fbec038e20f4308aa62eeba2bd7772b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10993687c12642479bfa3bbd030e9291', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 517.695114] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 517.784867] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e51d25-6b6b-4f29-b632-f0c2ef4e711e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.796331] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab038903-6b9e-4e01-8c16-4be70b4890c2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.836056] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f7758a-49ff-4408-92bb-31e96628423f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.844734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f55572-8886-4027-8f2f-ab2ea4f9709e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.866882] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.370437] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.414815] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.414815] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.414815] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 518.414815] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 518.709553] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 
tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 518.750295] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 518.750295] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 518.750557] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 518.750605] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 518.750711] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 518.750861] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 518.751984] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 518.753641] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064
tempest-ServerExternalEventsTest-1495296064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 518.753974] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 518.754197] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 518.754197] env[61905]: DEBUG nova.virt.hardware [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 518.755231] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152f4d80-455a-4502-8fe6-c91cc5e919a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.766473] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92868858-6375-43f9-9e6e-e0b6288e9a9b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.884400] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.884913] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 518.923066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 518.923066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 518.923066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Skipping network cache update for instance because it is Building. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 518.923066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 518.923066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 518.923066] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.923402] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.923402] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.923534] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.923731] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.923907] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.925236] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 518.925953] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 519.380550] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Successfully created port: 4638735b-1be9-4df2-9a4e-2f5f17832f4f {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.394395] env[61905]: DEBUG nova.compute.utils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.396029] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.396029] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 519.432191] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.432447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.002s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.432885] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.432885] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 519.433646] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1b6590-bfd4-404a-bd86-6ca7b70a8576 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.447982] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0aa7725e-57b1-423c-b79f-2062e9290ac0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.467673] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eb750a-b892-4ff3-8f84-621f34da7579 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.476582] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e38d59-22eb-4b51-a137-254f325198e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.519017] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181482MB free_disk=150GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 519.519017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.519017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.736741] env[61905]: DEBUG nova.policy [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '127aec483d144a068c156781a8ac4f2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6004364faeec4f4b8327b4962a864f74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.901701] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 520.551263] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9cacbcf0-63a4-4926-b007-07657b164e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 520.551466] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance aa565525-d5b0-4dc4-9f20-30542ee3e52f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 520.551712] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 33acde25-4310-4b16-bd9e-6ef8e27b49b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 520.551839] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c6698e84-05f7-4a92-809e-f48e0835a1d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 520.809774] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquiring lock "014849e7-a41c-432e-81ae-03725825166e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.811581] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "014849e7-a41c-432e-81ae-03725825166e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.918393] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 520.952280] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.952525] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.952676] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.952843] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.952978] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.956556] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 520.956829] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.957009] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.957190] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611
tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.957358] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.957530] env[61905]: DEBUG nova.virt.hardware [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.958493] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c77afdc-ae74-44be-85db-9bed25d46ba1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.970876] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107ab9df-05d1-42c0-9c9f-249c5ab8c1aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.058703] env[61905]: INFO nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 014849e7-a41c-432e-81ae-03725825166e has allocations against this compute host but is not found in the database. [ 521.058775] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 521.058940] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 521.184270] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98ef03d-cbdb-4cf4-822f-a91d7653a847 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.194251] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85319c8-90da-497e-b324-7f0a291c3108 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.229049] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3dcaba-e21e-48e6-bf38-0eaa65c34cf8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.236793] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ce2a32-fd7d-46d0-8877-d6c24ec0c21b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.253882] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None 
None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.314811] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 521.548091] env[61905]: ERROR nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. [ 521.548091] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.548091] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.548091] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.548091] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.548091] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.548091] env[61905]: ERROR nova.compute.manager raise self.value [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.548091] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.548091] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.548091] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.548561] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.548561] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.548561] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. 
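The save_and_reraise_exception and force_reraise frames in the traceback above are oslo.utils' standard cleanup-then-reraise idiom: the exception raised by _update_port is saved, cleanup code is allowed to run, and the saved exception is re-raised when the context manager exits, which is the "raise self.value" frame. A minimal sketch of that idiom, with hypothetical update_port and cleanup callables standing in for the Neutron calls (not Nova's actual signatures):

    from oslo_utils import excutils

    def update_ports(update_port, cleanup, ports):
        created = []
        for port in ports:
            try:
                # May raise, e.g. PortBindingFailed as in the log above.
                created.append(update_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # The in-flight exception is saved; cleanup runs, and on
                    # leaving this block force_reraise() re-raises the
                    # original ("raise self.value" in the frames above).
                    cleanup(created)
        return created
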
[ 521.548561] env[61905]: ERROR nova.compute.manager [ 521.550904] env[61905]: Traceback (most recent call last): [ 521.550904] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.550904] env[61905]: listener.cb(fileno) [ 521.550904] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.550904] env[61905]: result = function(*args, **kwargs) [ 521.550904] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.550904] env[61905]: return func(*args, **kwargs) [ 521.550904] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.550904] env[61905]: raise e [ 521.550904] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.550904] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 521.550904] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.550904] env[61905]: created_port_ids = self._update_ports_for_instance( [ 521.550904] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.550904] env[61905]: with excutils.save_and_reraise_exception(): [ 521.550904] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.550904] env[61905]: self.force_reraise() [ 521.550904] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.550904] env[61905]: raise self.value [ 521.550904] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.550904] env[61905]: updated_port = self._update_port( [ 521.550904] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.550904] env[61905]: _ensure_no_port_binding_failure(port) [ 521.550904] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.550904] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.550904] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. [ 521.550904] env[61905]: Removing descriptor: 16 [ 521.553127] env[61905]: ERROR nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. 
[ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Traceback (most recent call last): [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] yield resources [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.driver.spawn(context, instance, image_meta, [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] vm_ref = self.build_virtual_machine(instance, [ 521.553127] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] vif_infos = vmwarevif.get_vif_info(self._session, [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] for vif in network_info: [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self._sync_wrapper(fn, *args, **kwargs) [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.wait() [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self[:] = self._gt.wait() [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self._exit_event.wait() [ 521.553602] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 521.553602] env[61905]: ERROR 
nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] result = hub.switch() [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self.greenlet.switch() [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] result = function(*args, **kwargs) [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return func(*args, **kwargs) [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise e [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] nwinfo = self.network_api.allocate_for_instance( [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] created_port_ids = self._update_ports_for_instance( [ 521.554938] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] with excutils.save_and_reraise_exception(): [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.force_reraise() [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise self.value [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] updated_port = self._update_port( [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.555682] 
env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] _ensure_no_port_binding_failure(port) [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise exception.PortBindingFailed(port_id=port['id']) [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. [ 521.555682] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] [ 521.556983] env[61905]: INFO nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Terminating instance [ 521.556983] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.556983] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquired lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.556983] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 521.694187] env[61905]: ERROR nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. 
[ 521.694187] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.694187] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.694187] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.694187] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.694187] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.694187] env[61905]: ERROR nova.compute.manager raise self.value [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.694187] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 521.694187] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.694187] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 521.694663] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.694663] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 521.694663] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. 
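Both port failures terminate in the same frame: _ensure_no_port_binding_failure at nova/network/neutron.py:294, raising PortBindingFailed(port_id=port['id']). A reconstructed sketch of that check follows; the binding:vif_type probe and the 'binding_failed' value are assumptions based on Neutron's port-binding convention, since only the raise itself appears in these frames:

    class PortBindingFailed(Exception):
        # Message format taken from the log lines above.
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumed check: Neutron reports a failed bind by setting the
        # port's binding:vif_type attribute to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

The spawn path hits this indirectly: network allocation runs in a greenthread, so the exception only surfaces when get_vif_info iterates network_info and the _sync_wrapper waits on that thread, which is why the same PortBindingFailed appears a second time under _build_and_run_instance.
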
[ 521.694663] env[61905]: ERROR nova.compute.manager [ 521.694663] env[61905]: Traceback (most recent call last): [ 521.694663] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 521.694663] env[61905]: listener.cb(fileno) [ 521.694663] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 521.694663] env[61905]: result = function(*args, **kwargs) [ 521.694663] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 521.694663] env[61905]: return func(*args, **kwargs) [ 521.694663] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 521.694663] env[61905]: raise e [ 521.694663] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 521.694663] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 521.694663] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 521.694663] env[61905]: created_port_ids = self._update_ports_for_instance( [ 521.694663] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 521.694663] env[61905]: with excutils.save_and_reraise_exception(): [ 521.694663] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 521.694663] env[61905]: self.force_reraise() [ 521.694663] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 521.694663] env[61905]: raise self.value [ 521.694663] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 521.694663] env[61905]: updated_port = self._update_port( [ 521.694663] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 521.694663] env[61905]: _ensure_no_port_binding_failure(port) [ 521.694663] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 521.694663] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 521.695722] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. [ 521.695722] env[61905]: Removing descriptor: 15 [ 521.695722] env[61905]: ERROR nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. 
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Traceback (most recent call last):
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] yield resources
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.driver.spawn(context, instance, image_meta,
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 521.695722] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] vm_ref = self.build_virtual_machine(instance,
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] vif_infos = vmwarevif.get_vif_info(self._session,
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] for vif in network_info:
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self._sync_wrapper(fn, *args, **kwargs)
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.wait()
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self[:] = self._gt.wait()
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self._exit_event.wait()
[ 521.696041] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] result = hub.switch()
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self.greenlet.switch()
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] result = function(*args, **kwargs)
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return func(*args, **kwargs)
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise e
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] nwinfo = self.network_api.allocate_for_instance(
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 521.696363] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] created_port_ids = self._update_ports_for_instance(
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] with excutils.save_and_reraise_exception():
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.force_reraise()
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise self.value
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] updated_port = self._update_port(
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] _ensure_no_port_binding_failure(port)
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 521.699879] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise exception.PortBindingFailed(port_id=port['id'])
[ 521.700336] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information.
[ 521.700336] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f]
[ 521.700336] env[61905]: INFO nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Terminating instance
[ 521.700336] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquiring lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 521.700336] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquired lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 521.700336] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 521.736136] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Successfully created port: b1760e30-b41c-4177-858f-d57ca7694866 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 521.756569] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
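The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): Neutron accepted the port update but reported a failed binding, and Nova surfaces that as PortBindingFailed. A minimal sketch of that check, consistent with the frames above (the exact Nova source may differ in detail):

    from nova import exception  # as imported in nova/network/neutron.py

    # Sketch of the check named in the last two frames above. 'binding_failed'
    # is the value Neutron writes into binding:vif_type when it cannot bind
    # a port on any host.
    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])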
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.774120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Lock "708b8fc5-a919-449a-a8bb-0c0d3a40b952" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.841569] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 522.163204] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.264064] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 522.264220] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.746s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.264449] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.423s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.265945] env[61905]: INFO nova.compute.claims [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.270321] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.277569] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Starting instance... 
[ 522.375790] env[61905]: DEBUG nova.compute.manager [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Received event network-changed-41b6fe4e-e287-4d73-b440-f4d6f49c1b57 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 522.375982] env[61905]: DEBUG nova.compute.manager [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Refreshing instance network info cache due to event network-changed-41b6fe4e-e287-4d73-b440-f4d6f49c1b57. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 522.376233] env[61905]: DEBUG oslo_concurrency.lockutils [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] Acquiring lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 522.440118] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 522.514680] env[61905]: DEBUG nova.compute.manager [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Received event network-changed-4eef8260-8d90-4023-b4c1-d9cc57b224ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 522.514889] env[61905]: DEBUG nova.compute.manager [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Refreshing instance network info cache due to event network-changed-4eef8260-8d90-4023-b4c1-d9cc57b224ac. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 522.515744] env[61905]: DEBUG oslo_concurrency.lockutils [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] Acquiring lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 522.534081] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 522.810367] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 522.942365] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Releasing lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 522.943230] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
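The [req-... service nova] records are Neutron calling back into Nova: when a port changes, Neutron posts a network-changed event to Nova's os-server-external-events API, and Nova refreshes its network info cache in response. An illustrative request (the endpoint and token are placeholders; the UUIDs are the instance and port from the records above):

    import requests

    events = {"events": [{
        "name": "network-changed",
        "server_uuid": "aa565525-d5b0-4dc4-9f20-30542ee3e52f",
        "tag": "4eef8260-8d90-4023-b4c1-d9cc57b224ac",  # the affected port
    }]}
    requests.post("http://nova-api.example/v2.1/os-server-external-events",
                  json=events, headers={"X-Auth-Token": "PLACEHOLDER"})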
[ 522.943322] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 522.943648] env[61905]: DEBUG oslo_concurrency.lockutils [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] Acquired lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 522.943821] env[61905]: DEBUG nova.network.neutron [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Refreshing network info cache for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 522.944912] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a87f81e-3537-4152-a2a8-66d735549e5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.955238] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9331f9-8b88-4209-9680-5b7248537157 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.978576] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9cacbcf0-63a4-4926-b007-07657b164e99 could not be found.
[ 522.978950] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 522.979453] env[61905]: INFO nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 522.979885] env[61905]: DEBUG oslo.service.loopingcall [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 522.980091] env[61905]: DEBUG nova.compute.manager [-] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 522.980234] env[61905]: DEBUG nova.network.neutron [-] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 523.036759] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Releasing lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 523.037136] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 523.037328] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 523.037616] env[61905]: DEBUG oslo_concurrency.lockutils [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] Acquired lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 523.037778] env[61905]: DEBUG nova.network.neutron [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Refreshing network info cache for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 523.039874] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0e32457-bf5f-45aa-a164-27b5a277fcdf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.048984] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fdec04-b472-4df2-9bfe-6e242c372889 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.063933] env[61905]: DEBUG nova.network.neutron [-] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 523.079377] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa565525-d5b0-4dc4-9f20-30542ee3e52f could not be found.
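The WARNING followed immediately by "Instance destroyed" shows the VMware driver treating a VM that is missing from vCenter as already deleted, so teardown can continue. A sketch of that shape (helper names hypothetical; the real logic is in nova/virt/vmwareapi/vmops.py and assumes nova's exception module and an oslo.log logger in scope):

    def destroy(instance):
        try:
            vm_ref = find_vm_by_uuid(instance.uuid)   # hypothetical helper
            destroy_vm(vm_ref)                        # hypothetical helper
        except exception.InstanceNotFound:
            # Nothing on the backend: log and fall through so network and
            # resource cleanup still run, as in the records above.
            LOG.warning('Instance does not exist on backend', instance=instance)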
[ 523.080115] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 523.083290] env[61905]: INFO nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 523.085524] env[61905]: DEBUG oslo.service.loopingcall [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 523.085524] env[61905]: DEBUG nova.compute.manager [-] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 523.085524] env[61905]: DEBUG nova.network.neutron [-] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 523.141220] env[61905]: DEBUG nova.network.neutron [-] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 523.388050] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14c84c1-4435-4422-aab9-3168264b95db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.397229] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be539410-bebb-4e4f-9aad-76c72c4cc537 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.437512] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456bfdc4-5711-4849-bcf5-fe9518239501 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.445353] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35213e0-9b7c-4391-81e5-846291915f0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 523.461122] env[61905]: DEBUG nova.compute.provider_tree [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 523.507204] env[61905]: DEBUG nova.network.neutron [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 523.567140] env[61905]: DEBUG nova.network.neutron [-] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 523.610283] env[61905]: DEBUG nova.network.neutron [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 523.643506] env[61905]: DEBUG nova.network.neutron [-] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 523.819014] env[61905]: DEBUG nova.network.neutron [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 523.820761] env[61905]: DEBUG nova.network.neutron [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 523.963844] env[61905]: DEBUG nova.scheduler.client.report [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 524.071976] env[61905]: INFO nova.compute.manager [-] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Took 1.09 seconds to deallocate network for instance.
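The inventory dict reported for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 fixes the schedulable capacity: placement treats (total - reserved) * allocation_ratio as capacity for each resource class, with max_unit capping a single allocation. Worked out for the values in the records above:

    # Effective capacity implied by the inventory record above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 150},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'max per allocation:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0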
[ 524.078954] env[61905]: DEBUG nova.compute.claims [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 524.078954] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 524.119638] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquiring lock "056d4c90-e763-4488-a349-1adaddfc9e95" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 524.119941] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "056d4c90-e763-4488-a349-1adaddfc9e95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 524.149601] env[61905]: INFO nova.compute.manager [-] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Took 1.06 seconds to deallocate network for instance.
[ 524.153954] env[61905]: DEBUG nova.compute.claims [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 524.153954] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 524.329337] env[61905]: DEBUG oslo_concurrency.lockutils [req-74d6f1d5-9232-41df-9c4f-6ee472381918 req-7582bb19-538b-4ccf-a987-f5d5be6bdb39 service nova] Releasing lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 524.329851] env[61905]: DEBUG oslo_concurrency.lockutils [req-103dcd77-77ab-4e25-9893-ef0cd3599730 req-de5e5c30-b24a-4fbf-9069-e6289cb0d3e3 service nova] Releasing lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 524.472447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 524.473498] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 524.481238] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.668s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 524.481238] env[61905]: INFO nova.compute.claims [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 524.626099] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 524.987060] env[61905]: DEBUG nova.compute.utils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 524.987060] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 524.987060] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 525.129851] env[61905]: DEBUG nova.policy [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbab3130cf64db9a40c488a9379a9f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8729fe18202e49cdb1a271ab0de6a67b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 525.153466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 525.494843] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
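The nova.policy record above is a failed oslo.policy authorization: the requester only holds the reader and member roles, so the network:attach_external_network rule (admin-only by default) evaluates to False and the port is created on a tenant network instead. A standalone reproduction with an illustrative check string (not Nova's exact default):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))  # illustrative rule
    creds = {'roles': ['reader', 'member'], 'is_admin': False}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # False for a non-admin, matching the DEBUG record above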
[ 525.694669] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "c4642eab-5ac8-41c4-93a5-8429525f5120" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 525.694988] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "c4642eab-5ac8-41c4-93a5-8429525f5120" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 525.698320] env[61905]: ERROR nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information.
[ 525.698320] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 525.698320] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 525.698320] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 525.698320] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 525.698320] env[61905]: ERROR nova.compute.manager self.force_reraise()
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 525.698320] env[61905]: ERROR nova.compute.manager raise self.value
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 525.698320] env[61905]: ERROR nova.compute.manager updated_port = self._update_port(
[ 525.698320] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 525.698320] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 525.700494] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 525.700494] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 525.700494] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information.
[ 525.700494] env[61905]: ERROR nova.compute.manager
[ 525.700494] env[61905]: Traceback (most recent call last):
[ 525.700494] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 525.700494] env[61905]: listener.cb(fileno)
[ 525.700494] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 525.700494] env[61905]: result = function(*args, **kwargs)
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 525.700494] env[61905]: return func(*args, **kwargs)
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 525.700494] env[61905]: raise e
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 525.700494] env[61905]: nwinfo = self.network_api.allocate_for_instance(
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 525.700494] env[61905]: created_port_ids = self._update_ports_for_instance(
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 525.700494] env[61905]: with excutils.save_and_reraise_exception():
[ 525.700494] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 525.700494] env[61905]: self.force_reraise()
[ 525.700494] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 525.700494] env[61905]: raise self.value
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 525.700494] env[61905]: updated_port = self._update_port(
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 525.700494] env[61905]: _ensure_no_port_binding_failure(port)
[ 525.700494] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 525.700494] env[61905]: raise exception.PortBindingFailed(port_id=port['id'])
[ 525.701432] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information.
[ 525.701432] env[61905]: Removing descriptor: 17
[ 525.701432] env[61905]: ERROR nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information.
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Traceback (most recent call last):
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] yield resources
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.driver.spawn(context, instance, image_meta,
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 525.701432] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] vm_ref = self.build_virtual_machine(instance,
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] vif_infos = vmwarevif.get_vif_info(self._session,
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] for vif in network_info:
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self._sync_wrapper(fn, *args, **kwargs)
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.wait()
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self[:] = self._gt.wait()
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self._exit_event.wait()
[ 525.701747] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] result = hub.switch()
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self.greenlet.switch()
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] result = function(*args, **kwargs)
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return func(*args, **kwargs)
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise e
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] nwinfo = self.network_api.allocate_for_instance(
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 525.702100] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] created_port_ids = self._update_ports_for_instance(
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] with excutils.save_and_reraise_exception():
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.force_reraise()
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise self.value
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] updated_port = self._update_port(
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] _ensure_no_port_binding_failure(port)
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 525.702428] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise exception.PortBindingFailed(port_id=port['id'])
[ 525.702791] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information.
[ 525.702791] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6]
[ 525.702791] env[61905]: INFO nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Terminating instance
[ 525.707669] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquiring lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 525.707669] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquired lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 525.707669] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 525.796030] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36acf7be-52f9-47e9-bad4-c3b1fc860097 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.804965] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f09d09-2f2d-4493-9bfe-a099f2ca2c96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.839849] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0afaaaa-875b-4aea-becf-0d3d954ab7f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 525.847800] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36707ed1-9463-4c92-beb6-192443bbf753 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
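Both PortBindingFailed tracebacks route through oslo_utils.excutils.save_and_reraise_exception, which is why __exit__, force_reraise() and raise self.value appear as frames: the context manager captures the in-flight exception, lets cleanup code run, then re-raises the original. Typical usage (names illustrative, not the exact Nova code):

    from oslo_utils import excutils

    def update_port_with_cleanup(client, port_id, updates):
        try:
            return client.update_port(port_id, updates)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup/logging runs here; the saved exception is re-raised
                # on exit unless reraise is set to False inside the block.
                print('update failed for port %s' % port_id)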
[ 525.862943] env[61905]: DEBUG nova.compute.provider_tree [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 526.109658] env[61905]: DEBUG nova.compute.manager [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Received event network-vif-deleted-41b6fe4e-e287-4d73-b440-f4d6f49c1b57 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 526.109658] env[61905]: DEBUG nova.compute.manager [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Received event network-changed-4638735b-1be9-4df2-9a4e-2f5f17832f4f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 526.109658] env[61905]: DEBUG nova.compute.manager [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Refreshing instance network info cache due to event network-changed-4638735b-1be9-4df2-9a4e-2f5f17832f4f. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 526.110099] env[61905]: DEBUG oslo_concurrency.lockutils [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] Acquiring lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 526.205065] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 526.266874] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 526.288610] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Successfully created port: 7273f9ef-bdfa-470b-b60e-a9bfc84f3155 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 526.366283] env[61905]: DEBUG nova.scheduler.client.report [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 526.506022] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 526.518838] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 526.550695] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 526.552783] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 526.552783] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 526.552783] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 526.552783] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 526.552783] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 526.553012] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 526.553012] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 526.553012] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 526.553012] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 526.553188] env[61905]: DEBUG nova.virt.hardware [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 526.554558] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c572329c-246b-40ed-9d75-71e89ec4124d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 526.564750] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be94b854-f184-4629-b3de-e349391a5177 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 526.687884] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquiring lock "d3ec2174-203c-43c6-9ecc-0a0d42fc35df" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 526.688164] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "d3ec2174-203c-43c6-9ecc-0a0d42fc35df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 526.690601] env[61905]: DEBUG nova.compute.manager [req-e1a9f3bf-1cff-4fe7-a3a2-39a38641a979 req-015c7cf1-58e2-421d-b009-b5e3bd22c227 service nova] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Received event network-vif-deleted-4eef8260-8d90-4023-b4c1-d9cc57b224ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 526.736674] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
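The nova.virt.hardware records above enumerate CPU topologies for a 1-vCPU flavor under effectively unbounded limits (65536 sockets/cores/threads), yielding the single 1:1:1 topology. A toy version of that search (Nova's _get_possible_cpu_topologies is more involved):

    # Enumerate sockets*cores*threads factorizations of the vCPU count
    # within the given limits.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"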
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.873870] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 526.880505] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 2.800s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.010196] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Releasing lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.010856] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 527.011068] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 527.011693] env[61905]: DEBUG oslo_concurrency.lockutils [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] Acquired lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.011693] env[61905]: DEBUG nova.network.neutron [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Refreshing network info cache for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 527.012745] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1b5da98-7455-4029-bb70-74e7dfb55ce5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.031536] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691a2dcc-fed4-4523-96a2-c7998640fc7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.062524] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] 
[instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 33acde25-4310-4b16-bd9e-6ef8e27b49b6 could not be found. [ 527.062878] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 527.062977] env[61905]: INFO nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 527.063244] env[61905]: DEBUG oslo.service.loopingcall [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 527.063448] env[61905]: DEBUG nova.compute.manager [-] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 527.063547] env[61905]: DEBUG nova.network.neutron [-] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 527.105132] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Acquiring lock "82d50809-1df7-4055-97e8-863c0bdde21f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.105132] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Lock "82d50809-1df7-4055-97e8-863c0bdde21f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.118790] env[61905]: DEBUG nova.network.neutron [-] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.193247] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 527.376899] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "72770472-1b79-4408-b32c-34e56fd27c45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.376899] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.384200] env[61905]: DEBUG nova.compute.utils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.393670] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 527.393850] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 527.575169] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80420cc6-eb7b-46fb-ad4a-f38ed53e4b2f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.584433] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42fdaa-9bf8-4afd-be02-9180b1b1cbb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.621658] env[61905]: DEBUG nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 527.625332] env[61905]: DEBUG nova.network.neutron [-] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.627248] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d1bb96-3ba0-4bf4-ba80-a1c339ec64f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.639909] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d86718b-b3ed-43aa-9234-01a4a96bd244 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.657170] env[61905]: DEBUG nova.compute.provider_tree [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 527.659483] env[61905]: DEBUG nova.network.neutron [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 527.670731] env[61905]: DEBUG nova.policy [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4965bf3a77c541d291f5ce24345fd1a6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c3dd1af301471fa66a404622f9ceeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 527.724829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.895105] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.928585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "70c8f43f-28f0-4097-a8cb-37f6654ec014" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.928585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "70c8f43f-28f0-4097-a8cb-37f6654ec014" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.041619] env[61905]: DEBUG nova.network.neutron [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.132155] env[61905]: INFO nova.compute.manager [-] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Took 1.07 seconds to deallocate network for instance. [ 528.136030] env[61905]: DEBUG nova.compute.claims [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 528.136030] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.151491] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.164311] env[61905]: DEBUG nova.scheduler.client.report [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.283335] env[61905]: ERROR nova.compute.manager [None 
req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 528.283335] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.283335] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.283335] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.283335] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.283335] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.283335] env[61905]: ERROR nova.compute.manager raise self.value [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.283335] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 528.283335] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.283335] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 528.283857] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.283857] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 528.283857] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. 
[ 528.283857] env[61905]: ERROR nova.compute.manager [ 528.283857] env[61905]: Traceback (most recent call last): [ 528.283857] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 528.283857] env[61905]: listener.cb(fileno) [ 528.283857] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.283857] env[61905]: result = function(*args, **kwargs) [ 528.283857] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.283857] env[61905]: return func(*args, **kwargs) [ 528.283857] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 528.283857] env[61905]: raise e [ 528.283857] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.283857] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 528.283857] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.283857] env[61905]: created_port_ids = self._update_ports_for_instance( [ 528.283857] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.283857] env[61905]: with excutils.save_and_reraise_exception(): [ 528.283857] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.283857] env[61905]: self.force_reraise() [ 528.283857] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.283857] env[61905]: raise self.value [ 528.283857] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.283857] env[61905]: updated_port = self._update_port( [ 528.283857] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.283857] env[61905]: _ensure_no_port_binding_failure(port) [ 528.283857] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.283857] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 528.284600] env[61905]: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 528.284600] env[61905]: Removing descriptor: 18 [ 528.284667] env[61905]: ERROR nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. 
[ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Traceback (most recent call last): [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] yield resources [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.driver.spawn(context, instance, image_meta, [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] vm_ref = self.build_virtual_machine(instance, [ 528.284667] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] for vif in network_info: [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self._sync_wrapper(fn, *args, **kwargs) [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.wait() [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self[:] = self._gt.wait() [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self._exit_event.wait() [ 528.284963] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.284963] env[61905]: ERROR 
nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] result = hub.switch() [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self.greenlet.switch() [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] result = function(*args, **kwargs) [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return func(*args, **kwargs) [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise e [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] nwinfo = self.network_api.allocate_for_instance( [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] created_port_ids = self._update_ports_for_instance( [ 528.285308] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] with excutils.save_and_reraise_exception(): [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.force_reraise() [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise self.value [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] updated_port = self._update_port( [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.285965] 
env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] _ensure_no_port_binding_failure(port) [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise exception.PortBindingFailed(port_id=port['id']) [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 528.285965] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] [ 528.286471] env[61905]: INFO nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Terminating instance [ 528.287489] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.287568] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquired lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.287724] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 528.544410] env[61905]: DEBUG oslo_concurrency.lockutils [req-138dd53f-600b-4e9b-9e2d-f48f535a365a req-d9b23a8c-87ab-4784-aca4-f21c3c81ac87 service nova] Releasing lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.673877] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.795s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 528.674557] env[61905]: ERROR nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. 
[ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Traceback (most recent call last): [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.driver.spawn(context, instance, image_meta, [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] vm_ref = self.build_virtual_machine(instance, [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.674557] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] for vif in network_info: [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self._sync_wrapper(fn, *args, **kwargs) [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.wait() [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self[:] = self._gt.wait() [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self._exit_event.wait() [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] result = hub.switch() [ 528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
528.674889] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return self.greenlet.switch() [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] result = function(*args, **kwargs) [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] return func(*args, **kwargs) [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise e [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] nwinfo = self.network_api.allocate_for_instance( [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] created_port_ids = self._update_ports_for_instance( [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] with excutils.save_and_reraise_exception(): [ 528.675443] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] self.force_reraise() [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise self.value [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] updated_port = self._update_port( [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] _ensure_no_port_binding_failure(port) [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] raise exception.PortBindingFailed(port_id=port['id']) [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] nova.exception.PortBindingFailed: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. [ 528.675881] env[61905]: ERROR nova.compute.manager [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] [ 528.676202] env[61905]: DEBUG nova.compute.utils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 528.677348] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.524s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 528.686845] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Build of instance 9cacbcf0-63a4-4926-b007-07657b164e99 was re-scheduled: Binding failed for port 41b6fe4e-e287-4d73-b440-f4d6f49c1b57, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 528.687366] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 528.687584] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.687721] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquired lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.687873] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 528.823950] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 528.910770] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 528.947075] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.947512] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.947862] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.947978] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.949976] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.950206] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.950456] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 528.950628] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 528.950867] env[61905]: DEBUG nova.virt.hardware [None 
req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.951064] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.951235] env[61905]: DEBUG nova.virt.hardware [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.952517] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9433fd0-0c66-443f-8a0b-c3c56bd0a844 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.963575] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7154e7f7-8f2f-427d-9f60-e22dccd9e30c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.999186] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.147342] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Successfully created port: abe38819-3cdb-4ba8-8355-c44ed7ae00c9 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 529.219142] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquiring lock "46ce0987-e757-4ec7-9f85-bd84e50f2324" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.219142] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "46ce0987-e757-4ec7-9f85-bd84e50f2324" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.256652] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.463150] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.490748] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquiring lock "195ac5ee-8da4-41e9-8c1b-291ea09c80c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.491058] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock "195ac5ee-8da4-41e9-8c1b-291ea09c80c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.498585] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48329c31-b70d-47d4-92d6-f5e4e16d966a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.502383] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Releasing lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.502880] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 529.503099] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 529.503787] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfb7b9bd-5600-499a-994d-e1e9500f42a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.518639] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66b4753-2430-4b1c-82f5-64269d1099c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.560069] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21201b65-c0dc-4e05-8875-bd74fb8d75a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.573694] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42512e5-a01a-40cf-82e3-ef16ffc8a8c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.585522] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc1458c-8fb6-46de-adf0-656e3a1c3f59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.592101] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6698e84-05f7-4a92-809e-f48e0835a1d2 could not be found. [ 529.592319] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 529.592495] env[61905]: INFO nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Took 0.09 seconds to destroy the instance on the hypervisor. [ 529.592737] env[61905]: DEBUG oslo.service.loopingcall [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 529.593321] env[61905]: DEBUG nova.compute.manager [-] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 529.593421] env[61905]: DEBUG nova.network.neutron [-] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 529.607487] env[61905]: DEBUG nova.compute.provider_tree [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.647959] env[61905]: DEBUG nova.network.neutron [-] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.843213] env[61905]: DEBUG nova.compute.manager [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Received event network-vif-deleted-4638735b-1be9-4df2-9a4e-2f5f17832f4f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 529.843213] env[61905]: DEBUG nova.compute.manager [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Received event network-changed-b1760e30-b41c-4177-858f-d57ca7694866 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 529.843213] env[61905]: DEBUG nova.compute.manager [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Refreshing instance network info cache due to event network-changed-b1760e30-b41c-4177-858f-d57ca7694866. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 529.843213] env[61905]: DEBUG oslo_concurrency.lockutils [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] Acquiring lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.843213] env[61905]: DEBUG oslo_concurrency.lockutils [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] Acquired lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.843411] env[61905]: DEBUG nova.network.neutron [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Refreshing network info cache for port b1760e30-b41c-4177-858f-d57ca7694866 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 529.971224] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Releasing lock "refresh_cache-9cacbcf0-63a4-4926-b007-07657b164e99" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.971508] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 529.971674] env[61905]: DEBUG nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 529.971836] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 530.015753] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.114030] env[61905]: DEBUG nova.scheduler.client.report [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.149825] env[61905]: DEBUG nova.network.neutron [-] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.392696] env[61905]: DEBUG nova.network.neutron [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.521853] env[61905]: DEBUG nova.network.neutron [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.591735] env[61905]: DEBUG nova.network.neutron [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.618058] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.941s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.618955] env[61905]: ERROR nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. 
[ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Traceback (most recent call last): [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.driver.spawn(context, instance, image_meta, [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] vm_ref = self.build_virtual_machine(instance, [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.618955] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] for vif in network_info: [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self._sync_wrapper(fn, *args, **kwargs) [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.wait() [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self[:] = self._gt.wait() [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self._exit_event.wait() [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] result = hub.switch() [ 530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
530.619322] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return self.greenlet.switch() [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] result = function(*args, **kwargs) [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] return func(*args, **kwargs) [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise e [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] nwinfo = self.network_api.allocate_for_instance( [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] created_port_ids = self._update_ports_for_instance( [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] with excutils.save_and_reraise_exception(): [ 530.619685] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] self.force_reraise() [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise self.value [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] updated_port = self._update_port( [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] _ensure_no_port_binding_failure(port) [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] raise exception.PortBindingFailed(port_id=port['id']) [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] nova.exception.PortBindingFailed: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. [ 530.620112] env[61905]: ERROR nova.compute.manager [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] [ 530.620430] env[61905]: DEBUG nova.compute.utils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 530.621873] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Build of instance aa565525-d5b0-4dc4-9f20-30542ee3e52f was re-scheduled: Binding failed for port 4eef8260-8d90-4023-b4c1-d9cc57b224ac, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 530.622415] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 530.622518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquiring lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.622642] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Acquired lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.622790] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 530.623788] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.470s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.626730] env[61905]: INFO nova.compute.claims [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 
tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 530.655073] env[61905]: INFO nova.compute.manager [-] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Took 1.06 seconds to deallocate network for instance. [ 530.659786] env[61905]: DEBUG nova.compute.claims [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 530.659969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.029902] env[61905]: INFO nova.compute.manager [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: 9cacbcf0-63a4-4926-b007-07657b164e99] Took 1.05 seconds to deallocate network for instance. [ 531.096509] env[61905]: DEBUG oslo_concurrency.lockutils [req-92c35e28-c598-43c6-902e-7ec2d79ead2a req-8ca1c232-fdad-4c50-b085-9bb841264578 service nova] Releasing lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.198640] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.287811] env[61905]: ERROR nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. 
[ 531.287811] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 531.287811] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 531.287811] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 531.287811] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 531.287811] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 531.287811] env[61905]: ERROR nova.compute.manager raise self.value [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 531.287811] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 531.287811] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 531.287811] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 531.288325] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 531.288325] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 531.288325] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. 
[ 531.288325] env[61905]: ERROR nova.compute.manager [ 531.288325] env[61905]: Traceback (most recent call last): [ 531.288325] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 531.288325] env[61905]: listener.cb(fileno) [ 531.288325] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 531.288325] env[61905]: result = function(*args, **kwargs) [ 531.288325] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 531.288325] env[61905]: return func(*args, **kwargs) [ 531.288325] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 531.288325] env[61905]: raise e [ 531.288325] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 531.288325] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 531.288325] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 531.288325] env[61905]: created_port_ids = self._update_ports_for_instance( [ 531.288325] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 531.288325] env[61905]: with excutils.save_and_reraise_exception(): [ 531.288325] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 531.288325] env[61905]: self.force_reraise() [ 531.288325] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 531.288325] env[61905]: raise self.value [ 531.288325] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 531.288325] env[61905]: updated_port = self._update_port( [ 531.288325] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 531.288325] env[61905]: _ensure_no_port_binding_failure(port) [ 531.288325] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 531.288325] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 531.289081] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. [ 531.289081] env[61905]: Removing descriptor: 15 [ 531.290595] env[61905]: ERROR nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. 
[ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] Traceback (most recent call last): [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] yield resources [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.driver.spawn(context, instance, image_meta, [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] vm_ref = self.build_virtual_machine(instance, [ 531.290595] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] vif_infos = vmwarevif.get_vif_info(self._session, [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] for vif in network_info: [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self._sync_wrapper(fn, *args, **kwargs) [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.wait() [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self[:] = self._gt.wait() [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self._exit_event.wait() [ 531.290989] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 531.290989] env[61905]: ERROR 
nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] result = hub.switch() [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self.greenlet.switch() [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] result = function(*args, **kwargs) [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return func(*args, **kwargs) [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise e [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] nwinfo = self.network_api.allocate_for_instance( [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] created_port_ids = self._update_ports_for_instance( [ 531.291391] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] with excutils.save_and_reraise_exception(): [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.force_reraise() [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise self.value [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] updated_port = self._update_port( [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 531.291744] 
env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] _ensure_no_port_binding_failure(port) [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise exception.PortBindingFailed(port_id=port['id']) [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. [ 531.291744] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] [ 531.292105] env[61905]: INFO nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Terminating instance [ 531.294525] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquiring lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.294782] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquired lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.294844] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 531.347776] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.398091] env[61905]: DEBUG nova.compute.manager [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Received event network-changed-7273f9ef-bdfa-470b-b60e-a9bfc84f3155 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 531.398264] env[61905]: DEBUG nova.compute.manager [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Refreshing instance network info cache due to event network-changed-7273f9ef-bdfa-470b-b60e-a9bfc84f3155. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 531.398454] env[61905]: DEBUG oslo_concurrency.lockutils [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] Acquiring lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.852108] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Releasing lock "refresh_cache-aa565525-d5b0-4dc4-9f20-30542ee3e52f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.852402] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 531.853263] env[61905]: DEBUG nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 531.853263] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 531.870837] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.902557] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92024db8-abba-4e99-8943-2fbd8b12365c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.910836] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3a75ab-e381-4fce-b798-a13ea74f3587 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.943062] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7070b4-3db9-461c-a346-31df000c72f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.952369] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a107ef-6fe4-49b7-9556-b8ceb174c5ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.966141] env[61905]: DEBUG nova.compute.provider_tree [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 532.000715] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.082439] env[61905]: INFO nova.scheduler.client.report [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Deleted allocations for instance 9cacbcf0-63a4-4926-b007-07657b164e99 [ 532.134617] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.376839] env[61905]: ERROR nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. 
[ 532.376839] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.376839] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.376839] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.376839] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.376839] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.376839] env[61905]: ERROR nova.compute.manager raise self.value [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.376839] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 532.376839] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.376839] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 532.377670] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.377670] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 532.377670] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. 
[ 532.377670] env[61905]: ERROR nova.compute.manager [ 532.377670] env[61905]: Traceback (most recent call last): [ 532.377670] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 532.377670] env[61905]: listener.cb(fileno) [ 532.377670] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.377670] env[61905]: result = function(*args, **kwargs) [ 532.377670] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.377670] env[61905]: return func(*args, **kwargs) [ 532.377670] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.377670] env[61905]: raise e [ 532.377670] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.377670] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 532.377670] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.377670] env[61905]: created_port_ids = self._update_ports_for_instance( [ 532.377670] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.377670] env[61905]: with excutils.save_and_reraise_exception(): [ 532.377670] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.377670] env[61905]: self.force_reraise() [ 532.377670] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.377670] env[61905]: raise self.value [ 532.377670] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.377670] env[61905]: updated_port = self._update_port( [ 532.377670] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.377670] env[61905]: _ensure_no_port_binding_failure(port) [ 532.377670] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.377670] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 532.378669] env[61905]: nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. [ 532.378669] env[61905]: Removing descriptor: 17 [ 532.378669] env[61905]: ERROR nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. 
[ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Traceback (most recent call last): [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] yield resources [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.driver.spawn(context, instance, image_meta, [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.378669] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] vm_ref = self.build_virtual_machine(instance, [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] for vif in network_info: [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self._sync_wrapper(fn, *args, **kwargs) [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.wait() [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self[:] = self._gt.wait() [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self._exit_event.wait() [ 532.378958] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.379314] env[61905]: ERROR 
nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] result = hub.switch() [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self.greenlet.switch() [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] result = function(*args, **kwargs) [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return func(*args, **kwargs) [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise e [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] nwinfo = self.network_api.allocate_for_instance( [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.379314] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] created_port_ids = self._update_ports_for_instance( [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] with excutils.save_and_reraise_exception(): [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.force_reraise() [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise self.value [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] updated_port = self._update_port( [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.379722] 
env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] _ensure_no_port_binding_failure(port) [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 532.379722] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise exception.PortBindingFailed(port_id=port['id']) [ 532.380360] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. [ 532.380360] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] [ 532.380360] env[61905]: INFO nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Terminating instance [ 532.386094] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquiring lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.386094] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquired lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.386094] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 532.468879] env[61905]: DEBUG nova.scheduler.client.report [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 532.506605] env[61905]: DEBUG nova.network.neutron [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.591354] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f84b603d-7510-4845-9177-d1a3c5c7c8dc tempest-DeleteServersAdminTestJSON-2084086551 
tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "9cacbcf0-63a4-4926-b007-07657b164e99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.651s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.640427] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Releasing lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.640427] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 532.640427] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 532.640651] env[61905]: DEBUG oslo_concurrency.lockutils [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] Acquired lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.640780] env[61905]: DEBUG nova.network.neutron [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Refreshing network info cache for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 532.641846] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd3b47a3-706e-4196-9453-a5f91e432562 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.656050] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc9c98c-b5d5-4649-872f-7d4766063ed6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.687227] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 014849e7-a41c-432e-81ae-03725825166e could not be found. 
[ 532.687227] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 532.687227] env[61905]: INFO nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Took 0.05 seconds to destroy the instance on the hypervisor. [ 532.689409] env[61905]: DEBUG oslo.service.loopingcall [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 532.691045] env[61905]: DEBUG nova.compute.manager [-] [instance: 014849e7-a41c-432e-81ae-03725825166e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 532.691045] env[61905]: DEBUG nova.network.neutron [-] [instance: 014849e7-a41c-432e-81ae-03725825166e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 532.741082] env[61905]: DEBUG nova.network.neutron [-] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.908201] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.925218] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "b788bb84-07b9-4407-9e6e-cac6510166b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.925342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "b788bb84-07b9-4407-9e6e-cac6510166b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.976792] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.981471] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 532.986915] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.250s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.991727] env[61905]: INFO nova.compute.claims [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.012824] env[61905]: INFO nova.compute.manager [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] [instance: aa565525-d5b0-4dc4-9f20-30542ee3e52f] Took 1.16 seconds to deallocate network for instance.
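
The "Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7" entries record this node's placement inventory, and a claim like the one logged for c4642eab-5ac8-41c4-93a5-8429525f5120 succeeds while usage stays below (total - reserved) * allocation_ratio per resource class. A quick sketch of that arithmetic with the numbers from the log (min_unit/max_unit/step_size, omitted here, additionally bound each single allocation, e.g. max_unit=16 VCPU):

    # Effective schedulable capacity per resource class, taken from the
    # inventory dict reported above by the scheduler report client.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0        (48 physical vCPUs oversubscribed 4x)
    # MEMORY_MB 196078.0
    # DISK_GB 400.0
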
[ 533.048129] env[61905]: DEBUG nova.compute.manager [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Received event network-vif-deleted-b1760e30-b41c-4177-858f-d57ca7694866 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 533.048129] env[61905]: DEBUG nova.compute.manager [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Received event network-changed-abe38819-3cdb-4ba8-8355-c44ed7ae00c9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 533.048129] env[61905]: DEBUG nova.compute.manager [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Refreshing instance network info cache due to event network-changed-abe38819-3cdb-4ba8-8355-c44ed7ae00c9. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 533.048129] env[61905]: DEBUG oslo_concurrency.lockutils [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] Acquiring lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.096883] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.104077] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.199181] env[61905]: DEBUG nova.network.neutron [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.246207] env[61905]: DEBUG nova.network.neutron [-] [instance: 014849e7-a41c-432e-81ae-03725825166e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.475417] env[61905]: DEBUG nova.network.neutron [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.497916] env[61905]: DEBUG nova.compute.utils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 533.504276] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 533.504800] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 533.607760] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Releasing lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.608097] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 533.608288] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 533.609262] env[61905]: DEBUG oslo_concurrency.lockutils [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] Acquired lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.609630] env[61905]: DEBUG nova.network.neutron [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Refreshing network info cache for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 533.610486] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fce31ed-4f3c-4196-b665-bb3aab2953ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.621451] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a1421b-ce2f-4bff-97af-74aa7cc237a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.635320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.636651] env[61905]: DEBUG nova.policy [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29620092ed684e88a16dc62467ee38da', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82453de5de374b009ba6c22337079071', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 533.651712] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 708b8fc5-a919-449a-a8bb-0c0d3a40b952 could not be found. 
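
Every "Acquiring lock ... / Lock ... acquired ... :: waited / Lock ... "released" ... :: held" triple in this log is emitted by oslo.concurrency's lockutils wrappers (the inner/lock frames at lockutils.py:402/407/421 and 310/313/331). A minimal reproduction of both usage forms (the oslo_concurrency API is real; the lock names are just examples taken from the log):

    from oslo_concurrency import lockutils

    # Decorator form: serializes every caller sharing the lock name and logs
    # the Acquiring/acquired/released DEBUG lines seen throughout this file.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # resource-tracker bookkeeping runs with the lock held

    # Context-manager form, as used for the per-instance cache locks:
    with lockutils.lock('refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952'):
        pass  # rebuild the instance's network info cache
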
[ 533.651945] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 533.652216] env[61905]: INFO nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Took 0.04 seconds to destroy the instance on the hypervisor. [ 533.652373] env[61905]: DEBUG oslo.service.loopingcall [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 533.652595] env[61905]: DEBUG nova.compute.manager [-] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 533.652690] env[61905]: DEBUG nova.network.neutron [-] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 533.703785] env[61905]: DEBUG nova.network.neutron [-] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.752188] env[61905]: INFO nova.compute.manager [-] [instance: 014849e7-a41c-432e-81ae-03725825166e] Took 1.06 seconds to deallocate network for instance.
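
Twice now (instances 014849e7-a41c-432e-81ae-03725825166e and 708b8fc5-a919-449a-a8bb-0c0d3a40b952) the vmwareapi driver has been asked to destroy a VM that was never actually created, logged "Instance does not exist on backend", and moved straight on to network deallocation. A hedged sketch of that control flow (vm_lookup and destroy_vm are hypothetical stand-ins for the SearchIndex.FindAllByUuid lookup and VM teardown seen above):

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy(instance_uuid, vm_lookup, destroy_vm):
        try:
            vm_ref = vm_lookup(instance_uuid)   # e.g. a FindAllByUuid call
            destroy_vm(vm_ref)
        except InstanceNotFound as err:
            # Spawn failed before any VM existed, so there is nothing to
            # tear down on the backend; cleanup continues regardless.
            LOG.warning('Instance does not exist on backend: %s', err)
        LOG.debug('Instance destroyed')
        # ...network deallocation follows, as in the log above.
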
[ 533.758168] env[61905]: DEBUG nova.compute.claims [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 533.758168] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.903701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquiring lock "88c496a6-8007-4111-8ac1-6e0f8680ef24" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.903701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "88c496a6-8007-4111-8ac1-6e0f8680ef24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.979114] env[61905]: DEBUG oslo_concurrency.lockutils [req-3c6e3314-6944-4d79-92a4-cd9509e7426a req-fcadb606-162e-4d16-bbab-64077a29594e service nova] Releasing lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.005692] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 534.057803] env[61905]: INFO nova.scheduler.client.report [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Deleted allocations for instance aa565525-d5b0-4dc4-9f20-30542ee3e52f [ 534.204830] env[61905]: DEBUG nova.network.neutron [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.206858] env[61905]: DEBUG nova.network.neutron [-] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.307873] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e199260-aaca-4a93-bcbe-aca75cec17f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.317531] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f810d2-f9ae-48ef-84f2-88b5de6e128f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.354064] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9117ce55-7d41-4ba7-91ab-fb2bb3909c4c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.367021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a077025-63cc-44f4-8bdf-d20778915178 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.385689] env[61905]: DEBUG nova.compute.provider_tree [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.572954] env[61905]: DEBUG oslo_concurrency.lockutils [None req-524bb25c-5d25-413f-87ab-af4059fd4334 tempest-TenantUsagesTestJSON-1767562873 tempest-TenantUsagesTestJSON-1767562873-project-member] Lock "aa565525-d5b0-4dc4-9f20-30542ee3e52f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.831s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.628621] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Successfully created port: e3313851-073d-4cae-bf90-bf32e98b2b28 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 534.673168] env[61905]: DEBUG nova.network.neutron [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.713012] env[61905]: INFO nova.compute.manager [-] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Took 1.06 seconds to deallocate network for instance.
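
When a build fails after its resource claim succeeded, the claim has to be given back, which is what the "Aborting claim:" entries and the "compute_resources" lock taken by ResourceTracker.abort_instance_claim record above. A simplified sketch of the claim/abort bookkeeping (stand-in classes, not Nova's actual ResourceTracker):

    class Claim:
        # Stand-in for nova.compute.claims.Claim: resources are consumed
        # when the claim is created and handed back via abort().
        def __init__(self, tracker, amount):
            self.tracker = tracker
            self.amount = amount
            tracker.usage += amount            # "Claim successful on node ..."

        def abort(self):
            self.tracker.usage -= self.amount  # "Aborting claim: ..."

    class ResourceTracker:
        def __init__(self):
            self.usage = 0

        def instance_claim(self, amount):
            return Claim(self, amount)

    rt = ResourceTracker()
    claim = rt.instance_claim(1)
    try:
        raise RuntimeError('PortBindingFailed during network allocation')
    except RuntimeError:
        claim.abort()                          # resources returned on failure
    assert rt.usage == 0
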
[ 534.719454] env[61905]: DEBUG nova.compute.claims [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 534.719454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.891280] env[61905]: DEBUG nova.scheduler.client.report [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.022191] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 535.038518] env[61905]: DEBUG nova.compute.manager [req-368883e6-3903-49df-9207-a3ce40b9036b req-22e7d0a8-b3d3-4a46-9535-e99461ef2991 service nova] [instance: 014849e7-a41c-432e-81ae-03725825166e] Received event network-vif-deleted-7273f9ef-bdfa-470b-b60e-a9bfc84f3155 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 535.075469] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 535.075748] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 535.075899] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 535.076089] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 535.076230] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 535.076366] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 535.076579] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905)
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 535.076739] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 535.076893] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 535.077071] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 535.077407] env[61905]: DEBUG nova.virt.hardware [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 535.077631] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 535.082173] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d886b2-0926-4aaf-be20-e08751907873 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.096132] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb4c70c-38e1-4d38-a414-1b762ea44664 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.164196] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.164556] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.175409] env[61905]: DEBUG oslo_concurrency.lockutils [req-5df2631e-2a85-44aa-ac65-ee10b9e6a94d req-6a2ad6b0-b17e-43b5-9aa8-ff452a48b890 service nova] Releasing lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.403386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.406891] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 535.416448] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.692s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.418780] env[61905]: INFO nova.compute.claims [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.475593] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "2a8bcc04-5519-4890-839b-64dcf422526d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.475835] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.616404] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.925515] env[61905]: DEBUG nova.compute.utils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.935192] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member]
[instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 535.935192] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 536.055328] env[61905]: DEBUG nova.policy [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cad66f711e2d4972bca5d9e85395570d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21df24c46bdb48eaab7ebae22a10a668', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 536.157290] env[61905]: DEBUG nova.compute.manager [req-17f819d0-6084-4452-95fa-deaf74a85d7d req-d902af4d-572c-40cf-9a11-9ab8b87b8996 service nova] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Received event network-vif-deleted-abe38819-3cdb-4ba8-8355-c44ed7ae00c9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.431681] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 536.762745] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb3c6e-931a-4b98-a3bf-76735d6c2df3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.777611] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464789ad-c04c-44b7-83a4-82e4a8a4adc1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.818516] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b8b3e0-77e3-4878-a195-1f60724837a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.829252] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbf87d2-8624-480e-b214-2a8043596bf9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.841844] env[61905]: DEBUG nova.compute.provider_tree [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.328799] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Successfully created port: 8055d540-f6ce-4a84-98b8-f088dca855c8 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 537.331452] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "56997e40-ec60-422d-b58c-8a628d37b1bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.331823] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "56997e40-ec60-422d-b58c-8a628d37b1bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.351104] env[61905]: DEBUG nova.scheduler.client.report [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}}
{{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.445348] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 537.467338] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 537.467590] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 537.467848] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.467923] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 537.468088] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.468240] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 537.470866] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:569}} [ 537.471566] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 537.471566] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 537.471566] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 537.471566] env[61905]: DEBUG nova.virt.hardware [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 537.472288] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24eb90de-639b-412a-8e86-9469f48af27e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.481257] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360619e7-679b-4983-9273-eaaafb05e5fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.858453] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.862129] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 537.863169] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 9.728s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.961653] env[61905]: ERROR nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. [ 537.961653] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.961653] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.961653] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.961653] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.961653] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.961653] env[61905]: ERROR nova.compute.manager raise self.value [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.961653] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 537.961653] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.961653] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 537.962299] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.962299] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 537.962299] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. 
[ 537.962299] env[61905]: ERROR nova.compute.manager [ 537.962299] env[61905]: Traceback (most recent call last): [ 537.962299] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 537.962299] env[61905]: listener.cb(fileno) [ 537.962299] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.962299] env[61905]: result = function(*args, **kwargs) [ 537.962299] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 537.962299] env[61905]: return func(*args, **kwargs) [ 537.962299] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.962299] env[61905]: raise e [ 537.962299] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.962299] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 537.962299] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.962299] env[61905]: created_port_ids = self._update_ports_for_instance( [ 537.962299] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.962299] env[61905]: with excutils.save_and_reraise_exception(): [ 537.962299] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.962299] env[61905]: self.force_reraise() [ 537.962299] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.962299] env[61905]: raise self.value [ 537.962299] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.962299] env[61905]: updated_port = self._update_port( [ 537.962299] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.962299] env[61905]: _ensure_no_port_binding_failure(port) [ 537.962299] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.962299] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 537.963070] env[61905]: nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. [ 537.963070] env[61905]: Removing descriptor: 17 [ 537.963070] env[61905]: ERROR nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. 
[ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Traceback (most recent call last): [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] yield resources [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.driver.spawn(context, instance, image_meta, [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.963070] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] vm_ref = self.build_virtual_machine(instance, [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] for vif in network_info: [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self._sync_wrapper(fn, *args, **kwargs) [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.wait() [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self[:] = self._gt.wait() [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self._exit_event.wait() [ 537.963410] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.963809] env[61905]: ERROR 
nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] result = hub.switch() [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self.greenlet.switch() [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] result = function(*args, **kwargs) [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return func(*args, **kwargs) [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise e [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] nwinfo = self.network_api.allocate_for_instance( [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 537.963809] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] created_port_ids = self._update_ports_for_instance( [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] with excutils.save_and_reraise_exception(): [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.force_reraise() [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise self.value [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] updated_port = self._update_port( [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.964210] 
env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] _ensure_no_port_binding_failure(port) [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 537.964210] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise exception.PortBindingFailed(port_id=port['id']) [ 537.964572] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. [ 537.964572] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] [ 537.964572] env[61905]: INFO nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Terminating instance [ 537.966924] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquiring lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.967107] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquired lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.967272] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 538.183613] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquiring lock "79c34bad-f29b-4d5e-97d5-6bfd6be55b31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.184036] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Lock "79c34bad-f29b-4d5e-97d5-6bfd6be55b31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.375180] env[61905]: DEBUG nova.compute.utils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.383047] env[61905]: DEBUG 
nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 538.383230] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 538.494185] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 538.518419] env[61905]: DEBUG nova.policy [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7764214371b643ed8eb86123b7950aa9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef8bd62a11d742aca1732be47a0791ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 538.613574] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.688155] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9d5701-507c-4b78-891d-dbc481b711f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.699148] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1626963-7c8b-4af4-b964-70b66a92ae6b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.743909] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb805d7-6ff8-4aa0-badf-8493938304a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.753708] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1145b688-e221-4eb9-aa5e-d13071858d19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.768229] env[61905]: DEBUG nova.compute.provider_tree [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 
tempest-ServerExternalEventsTest-1495296064-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.886718] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 539.028578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "a9ce5207-c493-4924-8371-db65cf359523" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.028867] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "a9ce5207-c493-4924-8371-db65cf359523" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.119267] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Releasing lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.120970] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 539.120970] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 539.120970] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-576c7376-6c31-4db3-bd8f-c75c14d2f4d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.137747] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e77bf2-c175-4870-ac8a-95094c9f6edb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.177361] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 056d4c90-e763-4488-a349-1adaddfc9e95 could not be found. [ 539.177361] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 539.177361] env[61905]: INFO nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Took 0.06 seconds to destroy the instance on the hypervisor. [ 539.177361] env[61905]: DEBUG oslo.service.loopingcall [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.178519] env[61905]: DEBUG nova.compute.manager [-] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.178587] env[61905]: DEBUG nova.network.neutron [-] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 539.182674] env[61905]: DEBUG nova.compute.manager [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Received event network-changed-e3313851-073d-4cae-bf90-bf32e98b2b28 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 539.182674] env[61905]: DEBUG nova.compute.manager [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Refreshing instance network info cache due to event network-changed-e3313851-073d-4cae-bf90-bf32e98b2b28. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 539.182674] env[61905]: DEBUG oslo_concurrency.lockutils [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] Acquiring lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.182674] env[61905]: DEBUG oslo_concurrency.lockutils [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] Acquired lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.184994] env[61905]: DEBUG nova.network.neutron [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Refreshing network info cache for port e3313851-073d-4cae-bf90-bf32e98b2b28 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 539.232107] env[61905]: DEBUG nova.network.neutron [-] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 539.274413] env[61905]: DEBUG nova.scheduler.client.report [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 539.586455] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Successfully created port: 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.721601] env[61905]: DEBUG nova.network.neutron [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 539.734805] env[61905]: DEBUG nova.network.neutron [-] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.779310] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.779998] env[61905]: ERROR nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information. 
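[editor's note] Every traceback in this section bottoms out in the same check: after Nova asks Neutron to create or update (bind) a port, it inspects the returned port dict and fails fast if the binding did not succeed, which is the nova/network/neutron.py line 294 frame above. The sketch below is a minimal, self-contained approximation of that check, not the literal Nova source; it assumes only that Neutron reports binding:vif_type = 'binding_failed' on a failed binding, which is what the real _ensure_no_port_binding_failure keys on.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports when it could not bind


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            f"logs for more information.")


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# A port dict shaped like the one behind the failure logged above:
port = {'id': 'e3313851-073d-4cae-bf90-bf32e98b2b28',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port e3313851-..., please check neutron logs ...

Because the check runs inside _update_ports_for_instance under save_and_reraise_exception, the same PortBindingFailed is what each "Failed to build and run instance" entry ultimately re-raises.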
[ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Traceback (most recent call last): [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.driver.spawn(context, instance, image_meta, [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] vm_ref = self.build_virtual_machine(instance, [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] vif_infos = vmwarevif.get_vif_info(self._session, [ 539.779998] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] for vif in network_info: [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self._sync_wrapper(fn, *args, **kwargs) [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.wait() [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self[:] = self._gt.wait() [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self._exit_event.wait() [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] result = hub.switch() [ 539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
539.780360] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return self.greenlet.switch() [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] result = function(*args, **kwargs) [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] return func(*args, **kwargs) [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise e [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] nwinfo = self.network_api.allocate_for_instance( [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] created_port_ids = self._update_ports_for_instance( [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] with excutils.save_and_reraise_exception(): [ 539.780707] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] self.force_reraise() [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise self.value [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] updated_port = self._update_port( [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] _ensure_no_port_binding_failure(port) [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] raise exception.PortBindingFailed(port_id=port['id']) [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] nova.exception.PortBindingFailed: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information. [ 539.781142] env[61905]: ERROR nova.compute.manager [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] [ 539.781664] env[61905]: DEBUG nova.compute.utils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 539.783271] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.632s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.784399] env[61905]: INFO nova.compute.claims [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 539.787742] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Build of instance 33acde25-4310-4b16-bd9e-6ef8e27b49b6 was re-scheduled: Binding failed for port 4638735b-1be9-4df2-9a4e-2f5f17832f4f, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 539.788240] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 539.788466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquiring lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.789068] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Acquired lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.789068] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 539.898353] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 539.918985] env[61905]: DEBUG nova.network.neutron [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.927369] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.927792] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.927792] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.928016] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.932031] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.932031] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.932031] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.932031] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.932031] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.932468] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.932468] env[61905]: DEBUG nova.virt.hardware [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.932468] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc84810-f0cc-4509-aa8e-6c506f1b0835 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.947684] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bec280-5689-434f-884d-00109461657b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.241317] env[61905]: INFO nova.compute.manager [-] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Took 1.06 seconds to deallocate network for instance. [ 540.246848] env[61905]: DEBUG nova.compute.claims [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 540.247078] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.328243] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.427414] env[61905]: DEBUG oslo_concurrency.lockutils [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] Releasing lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.427414] env[61905]: DEBUG nova.compute.manager [req-e3428ef7-33a9-48fb-b9f2-a19efc6b4b2d req-4d7c356e-0e87-4ea6-8695-1a0b56ed0c50 service nova] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Received event network-vif-deleted-e3313851-073d-4cae-bf90-bf32e98b2b28 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 540.431097] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.818381] env[61905]: ERROR nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. [ 540.818381] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.818381] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 540.818381] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 540.818381] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.818381] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.818381] env[61905]: ERROR nova.compute.manager raise self.value [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 540.818381] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 540.818381] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.818381] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 540.818846] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.818846] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 540.818846] 
env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. [ 540.818846] env[61905]: ERROR nova.compute.manager [ 540.818846] env[61905]: Traceback (most recent call last): [ 540.818846] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 540.818846] env[61905]: listener.cb(fileno) [ 540.818846] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.818846] env[61905]: result = function(*args, **kwargs) [ 540.818846] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 540.818846] env[61905]: return func(*args, **kwargs) [ 540.818846] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.818846] env[61905]: raise e [ 540.818846] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.818846] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 540.818846] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 540.818846] env[61905]: created_port_ids = self._update_ports_for_instance( [ 540.818846] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 540.818846] env[61905]: with excutils.save_and_reraise_exception(): [ 540.818846] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.818846] env[61905]: self.force_reraise() [ 540.818846] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.818846] env[61905]: raise self.value [ 540.818846] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 540.818846] env[61905]: updated_port = self._update_port( [ 540.818846] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.818846] env[61905]: _ensure_no_port_binding_failure(port) [ 540.818846] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.818846] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 540.819571] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. [ 540.819571] env[61905]: Removing descriptor: 18 [ 540.819571] env[61905]: ERROR nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. 
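[editor's note] The bare eventlet traceback just above ("listener.cb(fileno)") and the "Instance failed to spawn" entry that follows it are two sides of the same pattern: the binding error is raised in a background greenthread (_allocate_network_async), and only surfaces later, when the VMware driver first iterates network_info inside get_vif_info. The hop through model.py's __iter__ -> _sync_wrapper -> wait() in every traceback here is that deferral. Below is a toy sketch of the pattern, assuming a plain threading.Thread in place of an eventlet greenthread; the class and function names are illustrative, not Nova's actual API.

import threading


class AsyncNetworkInfo:
    """Toy version of nova.network.model.NetworkInfoAsyncWrapper: start
    allocation in the background and re-raise any failure only when the
    result is first iterated (the _sync_wrapper -> wait() hop seen in the
    tracebacks above)."""

    def __init__(self, fn, *args):
        self._result = None
        self._exc = None
        self._worker = threading.Thread(target=self._run, args=(fn, *args))
        self._worker.start()

    def _run(self, fn, *args):
        try:
            self._result = fn(*args)
        except Exception as exc:   # captured now, re-raised at first use
            self._exc = exc

    def __iter__(self):
        self._worker.join()        # the wait() frame in the logged traceback
        if self._exc is not None:
            raise self._exc        # surfaces inside the driver's spawn path
        return iter(self._result)


def allocate_for_instance(port_id):
    # Stand-in for the Neutron allocation that failed in this log.
    raise RuntimeError(f"Binding failed for port {port_id}")


nw_info = AsyncNetworkInfo(allocate_for_instance,
                           '8055d540-f6ce-4a84-98b8-f088dca855c8')
try:
    for _vif in nw_info:           # first iteration, as in get_vif_info()
        pass
except RuntimeError as exc:
    print(exc)

This is why the instance reaches the driver's spawn() at all: the allocation failure is parked in the wrapper until build_virtual_machine touches network_info, at which point spawn fails and the manager terminates the instance, as logged below.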
[ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Traceback (most recent call last): [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] yield resources [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.driver.spawn(context, instance, image_meta, [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.819571] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] vm_ref = self.build_virtual_machine(instance, [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] for vif in network_info: [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self._sync_wrapper(fn, *args, **kwargs) [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.wait() [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self[:] = self._gt.wait() [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self._exit_event.wait() [ 540.819995] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 540.820347] env[61905]: ERROR 
nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] result = hub.switch() [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self.greenlet.switch() [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] result = function(*args, **kwargs) [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return func(*args, **kwargs) [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise e [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] nwinfo = self.network_api.allocate_for_instance( [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 540.820347] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] created_port_ids = self._update_ports_for_instance( [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] with excutils.save_and_reraise_exception(): [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.force_reraise() [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise self.value [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] updated_port = self._update_port( [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.820700] 
env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] _ensure_no_port_binding_failure(port) [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.820700] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise exception.PortBindingFailed(port_id=port['id']) [ 540.821022] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. [ 540.821022] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] [ 540.821022] env[61905]: INFO nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Terminating instance [ 540.822158] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.822316] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquired lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.822476] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 540.934298] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Releasing lock "refresh_cache-33acde25-4310-4b16-bd9e-6ef8e27b49b6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.936916] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 540.936916] env[61905]: DEBUG nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 540.936916] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 540.965053] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.156840] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd87679a-dd6d-4e2f-aef3-59417a0c79ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.166617] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22135865-5da0-454a-980e-1958f25aa86c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.204152] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6990cf2b-23f9-4dff-8520-d85f4492c5fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.212885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f6090c-e22c-4630-9e15-4e3fb0ec0412 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.233539] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.369359] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.470995] env[61905]: DEBUG nova.network.neutron [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.503151] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.770334] env[61905]: ERROR nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [req-3b7e7a4c-9a40-424b-9b1b-6c935ae3e627] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3b7e7a4c-9a40-424b-9b1b-6c935ae3e627"}]} [ 541.801030] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 541.824089] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 541.824089] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.841274] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 541.861469] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 541.900763] env[61905]: ERROR nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. [ 541.900763] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.900763] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.900763] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.900763] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.900763] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.900763] env[61905]: ERROR nova.compute.manager raise self.value [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.900763] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.900763] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.900763] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.901251] env[61905]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.901251] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.901251] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. [ 541.901251] env[61905]: ERROR nova.compute.manager [ 541.901411] env[61905]: Traceback (most recent call last): [ 541.903269] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.903269] env[61905]: listener.cb(fileno) [ 541.903269] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.903269] env[61905]: result = function(*args, **kwargs) [ 541.903269] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.903269] env[61905]: return func(*args, **kwargs) [ 541.903269] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.903269] env[61905]: raise e [ 541.903269] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.903269] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 541.903269] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.903269] env[61905]: created_port_ids = self._update_ports_for_instance( [ 541.903269] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.903269] env[61905]: with excutils.save_and_reraise_exception(): [ 541.903269] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.903269] env[61905]: self.force_reraise() [ 541.903269] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.903269] env[61905]: raise self.value [ 541.903269] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.903269] env[61905]: updated_port = self._update_port( [ 541.903269] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.903269] env[61905]: _ensure_no_port_binding_failure(port) [ 541.903269] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.903269] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.903269] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. [ 541.903269] env[61905]: Removing descriptor: 17 [ 541.904179] env[61905]: ERROR nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. 
[ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Traceback (most recent call last): [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] yield resources [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.driver.spawn(context, instance, image_meta, [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] vm_ref = self.build_virtual_machine(instance, [ 541.904179] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] for vif in network_info: [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self._sync_wrapper(fn, *args, **kwargs) [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.wait() [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self[:] = self._gt.wait() [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self._exit_event.wait() [ 541.904476] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.904476] env[61905]: ERROR 
nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] result = hub.switch() [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self.greenlet.switch() [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] result = function(*args, **kwargs) [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return func(*args, **kwargs) [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise e [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] nwinfo = self.network_api.allocate_for_instance( [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] created_port_ids = self._update_ports_for_instance( [ 541.905082] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] with excutils.save_and_reraise_exception(): [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.force_reraise() [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise self.value [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] updated_port = self._update_port( [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.905727] 
env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] _ensure_no_port_binding_failure(port) [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise exception.PortBindingFailed(port_id=port['id']) [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. [ 541.905727] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] [ 541.906120] env[61905]: INFO nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Terminating instance [ 541.906120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquiring lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.906120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquired lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.906120] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 541.976666] env[61905]: INFO nova.compute.manager [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] [instance: 33acde25-4310-4b16-bd9e-6ef8e27b49b6] Took 1.04 seconds to deallocate network for instance. [ 542.007649] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Releasing lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.008091] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 542.008346] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 542.010320] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6185b5c-5b54-4e0a-9538-69e409bc5a33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.026320] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381fbcba-b2ee-4215-aa1a-7f7ec99e0b11 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.054520] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c4642eab-5ac8-41c4-93a5-8429525f5120 could not be found. [ 542.055225] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 542.055225] env[61905]: INFO nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Took 0.05 seconds to destroy the instance on the hypervisor. [ 542.055423] env[61905]: DEBUG oslo.service.loopingcall [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.060250] env[61905]: DEBUG nova.compute.manager [-] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 542.060250] env[61905]: DEBUG nova.network.neutron [-] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 542.086326] env[61905]: DEBUG nova.network.neutron [-] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.244179] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2176366-2416-4c7f-8b2c-c4c7848028f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.252154] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5268b4-ecc4-421a-bb75-268e723e7276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.297453] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a59f31-ec5b-4bca-8727-315c69018884 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.301690] env[61905]: DEBUG nova.compute.manager [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Received event network-changed-8055d540-f6ce-4a84-98b8-f088dca855c8 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 542.301963] env[61905]: DEBUG nova.compute.manager [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Refreshing instance network info cache due to event network-changed-8055d540-f6ce-4a84-98b8-f088dca855c8. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 542.302526] env[61905]: DEBUG oslo_concurrency.lockutils [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] Acquiring lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.302526] env[61905]: DEBUG oslo_concurrency.lockutils [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] Acquired lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.303138] env[61905]: DEBUG nova.network.neutron [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Refreshing network info cache for port 8055d540-f6ce-4a84-98b8-f088dca855c8 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 542.312622] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7018cc-e2a0-4e05-a882-a6c382633d00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.331042] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 542.441184] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.587927] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.589158] env[61905]: DEBUG nova.network.neutron [-] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.729737] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquiring lock "e997db40-b3a6-4c06-8991-cdb96954c0ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.729737] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "e997db40-b3a6-4c06-8991-cdb96954c0ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.841313] env[61905]: DEBUG nova.network.neutron [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.869702] env[61905]: ERROR nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [req-167e5d24-47f3-4bf9-b765-4514d73b58f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-167e5d24-47f3-4bf9-b765-4514d73b58f0"}]} [ 542.907213] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 542.931973] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 542.932281] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 542.952898] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 542.975823] env[61905]: DEBUG nova.network.neutron [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.983821] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 543.028210] env[61905]: INFO nova.scheduler.client.report [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Deleted allocations for instance 33acde25-4310-4b16-bd9e-6ef8e27b49b6 [ 543.091445] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Releasing lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.091893] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 543.092162] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 543.092487] env[61905]: INFO nova.compute.manager [-] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Took 1.03 seconds to deallocate network for instance. [ 543.092781] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70614e78-f0f4-45cf-8d45-8a6c2eee008a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.104534] env[61905]: DEBUG nova.compute.claims [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 543.104721] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.112780] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b15131-1ae4-4546-919a-4f2e68bc0d4f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.139879] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d3ec2174-203c-43c6-9ecc-0a0d42fc35df could not be found. 
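
The "Instance does not exist on backend" warnings in this run show the destroy path being deliberately idempotent: these instances never finished spawning (their port bindings failed), so the vCenter lookup finds no VM, the driver logs the warning, and cleanup still proceeds through "Instance destroyed", network deallocation, and the claim abort. A sketch of the pattern under stated assumptions (find_vm_by_uuid and delete_vm are hypothetical stand-ins for the SearchIndex.FindAllByUuid and destroy calls visible in the log):

```python
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def destroy(instance_uuid, find_vm_by_uuid, delete_vm):
    """Destroy a VM, treating an already-missing VM as success."""
    try:
        vm_ref = find_vm_by_uuid(instance_uuid)
        if vm_ref is None:
            raise InstanceNotFound(instance_uuid)
        delete_vm(vm_ref)
    except InstanceNotFound:
        # Matches the WARNING above: the VM is already gone, which is
        # acceptable for a destroy; fall through so cleanup continues.
        LOG.warning('Instance does not exist on backend: %s', instance_uuid)
    LOG.debug('Instance destroyed')


# Simulate an instance that failed before a VM was ever created.
destroy('d3ec2174-203c-43c6-9ecc-0a0d42fc35df',
        find_vm_by_uuid=lambda uuid: None,
        delete_vm=lambda ref: None)
```
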
[ 543.140123] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 543.140354] env[61905]: INFO nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Took 0.05 seconds to destroy the instance on the hypervisor. [ 543.140541] env[61905]: DEBUG oslo.service.loopingcall [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 543.143344] env[61905]: DEBUG nova.compute.manager [-] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 543.143452] env[61905]: DEBUG nova.network.neutron [-] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 543.162903] env[61905]: DEBUG nova.network.neutron [-] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.365147] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9384e90-8c37-48dd-b5f6-a4b11f7101d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.374172] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8a4263-fa2c-4391-9e70-502588ab0a3b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.407611] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3372f12a-9b12-4b0c-bd1e-55ebb036ad0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.417474] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b321b949-2656-4636-ae9c-0decf30037b9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
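
The repeated "Failed to update inventory ... Got 409 ... placement.concurrent_update" / "Refreshing inventories" pairs in this section are Placement's optimistic concurrency control at work: every inventory write carries the resource_provider_generation it was computed against, the server bumps that generation on each successful write, and a writer holding a stale generation gets a 409 and must re-read before retrying. A toy model of that read-modify-write loop (FakePlacement and RacyPlacement are illustrative stand-ins, not the real REST client):

```python
class ConflictError(Exception):
    pass


class FakePlacement:
    """Toy stand-in for the placement inventories endpoint."""

    def __init__(self):
        self.generation = 27
        self.inventory = {}

    def get_inventories(self):
        return {'resource_provider_generation': self.generation,
                'inventories': dict(self.inventory)}

    def put_inventories(self, body):
        # Reject writes based on a stale generation, analogous to the
        # HTTP 409 "placement.concurrent_update" responses above.
        if body['resource_provider_generation'] != self.generation:
            raise ConflictError('placement.concurrent_update')
        self.inventory = body['inventories']
        self.generation += 1  # each successful write bumps the generation


class RacyPlacement(FakePlacement):
    """Simulates one concurrent writer landing right after our read."""

    def __init__(self):
        super().__init__()
        self._raced = False

    def get_inventories(self):
        view = super().get_inventories()
        if not self._raced:
            self._raced = True
            self.generation += 1  # the competing writer sneaks in here
        return view


def set_inventory(placement, new_inventory, max_attempts=4):
    for attempt in range(1, max_attempts + 1):
        current = placement.get_inventories()  # refresh view + generation
        try:
            placement.put_inventories(
                {'resource_provider_generation':
                     current['resource_provider_generation'],
                 'inventories': new_inventory})
            return attempt
        except ConflictError:
            continue  # stale generation: re-read and retry
    raise ConflictError('gave up after %d attempts' % max_attempts)


print(set_inventory(RacyPlacement(), {'VCPU': {'total': 48}}))
# -> 2: the first PUT hits the conflict, the retry succeeds.
```

When the retries are exhausted mid-claim, the build fails with ResourceProviderUpdateConflict and the instance is re-scheduled, which is exactly what happens to instance 82d50809-1df7-4055-97e8-863c0bdde21f further below.
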
[ 543.431933] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 543.482605] env[61905]: DEBUG oslo_concurrency.lockutils [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] Releasing lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.482605] env[61905]: DEBUG nova.compute.manager [req-3bd49802-0b05-4a8e-85a2-350a953c716e req-420ed1c4-8157-421c-9b1a-f799fcda82af service nova] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Received event network-vif-deleted-8055d540-f6ce-4a84-98b8-f088dca855c8 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 543.547221] env[61905]: DEBUG oslo_concurrency.lockutils [None req-607b799f-aa51-41cf-b17c-ec61a2abb7ee tempest-ServerExternalEventsTest-1495296064 tempest-ServerExternalEventsTest-1495296064-project-member] Lock "33acde25-4310-4b16-bd9e-6ef8e27b49b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 33.137s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.667771] env[61905]: DEBUG nova.network.neutron [-] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.965927] env[61905]: ERROR nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [req-fbe7cfef-4948-4863-ad36-3d447ce41d76] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fbe7cfef-4948-4863-ad36-3d447ce41d76"}]} [ 543.991405] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 544.019835] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 544.019835] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.041803] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: a8d0cda8-ed5d-48da-9b7c-551b533b5b5a {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 544.050734] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 544.065664] env[61905]: DEBUG nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 544.170776] env[61905]: INFO nova.compute.manager [-] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Took 1.03 seconds to deallocate network for instance. [ 544.173135] env[61905]: DEBUG nova.compute.claims [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 544.173298] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.488034] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb64fca4-105a-4e68-a877-82846f0fffa8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.503159] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74178313-d822-4750-82b3-5630ad735a0f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.539269] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddfd7de-7552-40c3-bd63-be6f55b71bed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.547053] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e00905-155d-45c8-a24d-6901e8fb809f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.564028] env[61905]: DEBUG nova.compute.provider_tree [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.573437] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf 
tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "426a5334-83fb-4c2a-85ac-42a8dddd775b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.573641] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "426a5334-83fb-4c2a-85ac-42a8dddd775b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.583086] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.100474] env[61905]: ERROR nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [req-a8acb25a-e26c-4e9d-a327-f873c1a2bbf4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a8acb25a-e26c-4e9d-a327-f873c1a2bbf4"}]} [ 545.100474] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.315s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.100861] env[61905]: ERROR nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Failed to build and run instance: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 (generation 27): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a8acb25a-e26c-4e9d-a327-f873c1a2bbf4"}]} [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Traceback (most recent call last): [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] with self.rt.instance_claim(context, instance, node, allocs, [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] return f(*args, **kwargs) [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 215, in instance_claim [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] self._update(elevated, cn) [ 545.100861] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] self._update_to_placement(context, compute_node, startup) [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 545.101151] env[61905]: ERROR nova.compute.manager 
[instance: 82d50809-1df7-4055-97e8-863c0bdde21f] raise attempt.get() [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 719, in reraise [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] raise value [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 545.101151] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] self.reportclient.update_from_provider_tree( [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1498, in update_from_provider_tree [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] self.set_inventory_for_provider( [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1003, in set_inventory_for_provider [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] raise exception.ResourceProviderUpdateConflict( [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 (generation 27): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a8acb25a-e26c-4e9d-a327-f873c1a2bbf4"}]} [ 545.101569] env[61905]: ERROR nova.compute.manager [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] [ 545.109023] env[61905]: DEBUG nova.compute.utils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] A conflict was encountered attempting to update resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 (generation 27): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource pro {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 545.109023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.448s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.112698] env[61905]: DEBUG nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Build of instance 82d50809-1df7-4055-97e8-863c0bdde21f was re-scheduled: A conflict was encountered attempting to update resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 (generation 27): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a8acb25a-e26c-4e9d-a327-f873c1a2bbf4"}]} {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 545.113227] env[61905]: DEBUG nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 545.113687] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Acquiring lock "refresh_cache-82d50809-1df7-4055-97e8-863c0bdde21f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.114043] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Acquired lock "refresh_cache-82d50809-1df7-4055-97e8-863c0bdde21f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.114370] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 545.181323] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "eb372895-68b6-41cb-8ae5-dbfd57387505" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.181793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "eb372895-68b6-41cb-8ae5-dbfd57387505" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.216115] env[61905]: DEBUG nova.compute.manager [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Received event network-changed-55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 545.216115] env[61905]: DEBUG nova.compute.manager [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Refreshing instance network info cache due to event network-changed-55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 545.216115] env[61905]: DEBUG oslo_concurrency.lockutils [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] Acquiring lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.216115] env[61905]: DEBUG oslo_concurrency.lockutils [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] Acquired lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.216418] env[61905]: DEBUG nova.network.neutron [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Refreshing network info cache for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 545.642795] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 545.647199] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.655567] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 545.656144] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 545.670667] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 545.692598] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 545.753077] env[61905]: DEBUG nova.network.neutron [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 545.836750] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.964327] env[61905]: DEBUG nova.network.neutron [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.073248] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8a047a-1c29-4204-801a-369aa70d3aa0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.085028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80637ec0-b343-4041-95a5-ae97e5cb203c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.122861] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183dd062-342e-4f3c-b2b4-91279ddbae80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.131436] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cac205-0d70-49b0-8348-1ac3a9c276b2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.147066] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 546.345192] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Releasing lock "refresh_cache-82d50809-1df7-4055-97e8-863c0bdde21f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.345192] env[61905]: DEBUG nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 546.345192] env[61905]: DEBUG nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 546.345192] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 546.372605] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 546.472286] env[61905]: DEBUG oslo_concurrency.lockutils [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] Releasing lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.472413] env[61905]: DEBUG nova.compute.manager [req-6d9910fd-e70f-447a-86cc-6d2c3a890acc req-3a58a5c6-00f5-4665-9504-7ae2328e1f4b service nova] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Received event network-vif-deleted-55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.610858] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "b9199119-9d4e-4b04-8675-22f6680da8b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.611048] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.675296] env[61905]: ERROR nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [req-d05803f2-4900-493f-a550-5894d1b2a5fa] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d05803f2-4900-493f-a550-5894d1b2a5fa"}]}: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 546.699072] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 546.715880] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 546.716231] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 546.731619] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 546.755109] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 546.881325] env[61905]: DEBUG nova.network.neutron [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Updating 
instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.195898] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0813b347-a844-44b1-baaf-ca18a54398a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.207528] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f365a2-762e-4843-af8f-04322640ac49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.239260] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b449f3-480d-4b70-90bc-81db514049bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.247213] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2766e7c-5c1a-4e0c-aefc-c47eecfc2a2c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.264455] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 547.386284] env[61905]: INFO nova.compute.manager [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] [instance: 82d50809-1df7-4055-97e8-863c0bdde21f] Took 1.04 seconds to deallocate network for instance. [ 547.791682] env[61905]: ERROR nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [req-6b5e2454-4c3c-4390-b607-04a52d88ffde] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b5e2454-4c3c-4390-b607-04a52d88ffde"}]}: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. 
[ 547.813028] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 547.829818] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 547.829818] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 547.852507] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 547.874981] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 548.196793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquiring lock "949307dd-f8c4-4a79-ad82-99d416d06332" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.197319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "949307dd-f8c4-4a79-ad82-99d416d06332" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.315937] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811b4e0a-a517-4ca2-8037-282599538bc5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.324411] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d73d874-7bb7-42c1-b8ad-c0a51e4f943f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.359181] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77135e9b-e2a4-4dea-ab41-a2a90e4e552f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.369112] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431af79a-0268-4d88-a975-bd2398947fe2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.390121] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 548.399882] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.400126] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.431183] env[61905]: INFO nova.scheduler.client.report [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Deleted allocations for instance 82d50809-1df7-4055-97e8-863c0bdde21f [ 548.920316] env[61905]: ERROR nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [req-b84ecf69-60ef-447f-8300-a9fa862625aa] Failed to update 
inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b84ecf69-60ef-447f-8300-a9fa862625aa"}]}: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 548.944676] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 548.947450] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9ea86bb1-ae42-4a48-966f-6cf5ffb254ba tempest-FloatingIPsAssociationNegativeTestJSON-1119773338 tempest-FloatingIPsAssociationNegativeTestJSON-1119773338-project-member] Lock "82d50809-1df7-4055-97e8-863c0bdde21f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.842s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.970456] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 548.970683] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 548.983379] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: 
None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 549.008696] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 549.421111] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f8a342-6495-45b9-ab04-b2b1426241e3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.430198] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee3d141-5917-4cef-992a-ae16c166b8f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.462633] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 549.471019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b76850-92b4-4448-8a5e-df4544d83256 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.475911] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615f1b33-7c42-4b5d-a1c2-7ffd97fd4379 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.492053] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.009322] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.040134] env[61905]: DEBUG nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updated inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with generation 33 in Placement 
from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 550.040635] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 generation from 33 to 34 during operation: update_inventory {{(pid=61905) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 550.040689] env[61905]: DEBUG nova.compute.provider_tree [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 550.548916] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 5.441s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.549606] env[61905]: ERROR nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. 
[ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Traceback (most recent call last): [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.driver.spawn(context, instance, image_meta, [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] vm_ref = self.build_virtual_machine(instance, [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.549606] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] for vif in network_info: [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self._sync_wrapper(fn, *args, **kwargs) [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.wait() [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self[:] = self._gt.wait() [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self._exit_event.wait() [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] result = hub.switch() [ 550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
550.550041] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return self.greenlet.switch() [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] result = function(*args, **kwargs) [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] return func(*args, **kwargs) [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise e [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] nwinfo = self.network_api.allocate_for_instance( [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] created_port_ids = self._update_ports_for_instance( [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] with excutils.save_and_reraise_exception(): [ 550.550377] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] self.force_reraise() [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise self.value [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] updated_port = self._update_port( [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] _ensure_no_port_binding_failure(port) [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] raise exception.PortBindingFailed(port_id=port['id']) [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] nova.exception.PortBindingFailed: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. [ 550.550704] env[61905]: ERROR nova.compute.manager [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] [ 550.550991] env[61905]: DEBUG nova.compute.utils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.556083] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.919s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.557124] env[61905]: INFO nova.compute.claims [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.561546] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Build of instance c6698e84-05f7-4a92-809e-f48e0835a1d2 was re-scheduled: Binding failed for port b1760e30-b41c-4177-858f-d57ca7694866, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 550.562069] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 550.562381] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.562539] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquired lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.562798] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.106346] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.212333] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.713499] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Releasing lock "refresh_cache-c6698e84-05f7-4a92-809e-f48e0835a1d2" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.714249] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 551.714323] env[61905]: DEBUG nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.714434] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 551.876854] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.961410] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04db7673-917b-4724-943b-7826c148b93a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.973489] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9d7d62-f15e-464d-af9c-0599f45bebda {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.006877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6938c948-bd79-4398-9d6d-7ad490abbb48 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.016950] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b94eacd-9c0d-413f-8890-099c7b7fd2ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.036756] env[61905]: DEBUG nova.compute.provider_tree [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.236747] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "b92a6db6-c51a-45c8-9792-d394027bcb7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.237015] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 
tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "b92a6db6-c51a-45c8-9792-d394027bcb7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.381965] env[61905]: DEBUG nova.network.neutron [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.568324] env[61905]: ERROR nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [req-2045cb12-bbb4-42e0-b846-43fa711668bb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2045cb12-bbb4-42e0-b846-43fa711668bb"}]} [ 552.589534] env[61905]: DEBUG nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 552.622553] env[61905]: DEBUG nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 552.622553] env[61905]: DEBUG nova.compute.provider_tree [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.646621] env[61905]: DEBUG nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 552.686448] env[61905]: DEBUG nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 552.887214] env[61905]: INFO nova.compute.manager [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: c6698e84-05f7-4a92-809e-f48e0835a1d2] Took 1.17 seconds to deallocate network for instance. [ 553.179069] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befa28f8-41c8-46de-a9f4-8988cd232762 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.188065] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d55e09-4005-453b-b8bb-c41974f55982 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.227328] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b19429d-1d13-44ff-b16f-13b2eef92dfb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.238791] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e5db50-4e3b-49b4-ada8-6755d4067648 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.265155] env[61905]: DEBUG nova.compute.provider_tree [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.824031] env[61905]: DEBUG nova.scheduler.client.report [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updated inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with generation 35 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 553.824031] env[61905]: DEBUG nova.compute.provider_tree [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 generation from 35 to 36 during operation: update_inventory {{(pid=61905) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 553.824031] env[61905]: DEBUG nova.compute.provider_tree [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.935909] env[61905]: INFO nova.scheduler.client.report [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Deleted allocations for instance c6698e84-05f7-4a92-809e-f48e0835a1d2 [ 554.329031] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.774s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.329137] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.334248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.575s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.446596] env[61905]: DEBUG oslo_concurrency.lockutils [None req-58713e48-2954-432d-8a1e-91be7d9f5611 tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "c6698e84-05f7-4a92-809e-f48e0835a1d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 43.673s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.835507] env[61905]: DEBUG nova.compute.utils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.840163] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 554.951088] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 555.313148] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f8968a-8294-42c0-a0d7-5e1a2e4466eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.335919] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9bb9bb-eb3a-474f-8e3f-61d931f75612 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.370201] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Start building block device mappings for instance.
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.374354] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1262c1f-2d73-47bd-a41a-f8b1594b8bb5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.382984] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dcd465-1e49-4569-9396-f054d06eba6b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.398847] env[61905]: DEBUG nova.compute.provider_tree [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.481865] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.906817] env[61905]: DEBUG nova.scheduler.client.report [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.380716] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.404603] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.404855] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.405030] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.405214] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.405355] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.405494] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.405723] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.405885] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.408375] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 
tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.409365] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.409365] env[61905]: DEBUG nova.virt.hardware [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.412269] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6618a0-091b-4145-9612-00dac0059012 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.417127] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.084s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.417695] env[61905]: ERROR nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. 
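
The traceback that follows shows where this PortBindingFailed comes from: the async network-allocation greenthread re-raises when Nova inspects the port Neutron handed back. A minimal, self-contained sketch of that check, assuming a plain dict port payload (simplified from the pattern in nova/network/neutron.py, not the actual Nova source):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port: dict) -> None:
        # Neutron marks a failed binding by setting binding:vif_type to
        # 'binding_failed' on the port; Nova converts that into an exception
        # so the build aborts instead of spawning a VM with a dead VIF.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])
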
[ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] Traceback (most recent call last): [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.driver.spawn(context, instance, image_meta, [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] vm_ref = self.build_virtual_machine(instance, [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.417695] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] for vif in network_info: [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self._sync_wrapper(fn, *args, **kwargs) [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.wait() [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self[:] = self._gt.wait() [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self._exit_event.wait() [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] result = hub.switch() [ 556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
556.419475] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return self.greenlet.switch() [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] result = function(*args, **kwargs) [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] return func(*args, **kwargs) [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise e [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] nwinfo = self.network_api.allocate_for_instance( [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] created_port_ids = self._update_ports_for_instance( [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] with excutils.save_and_reraise_exception(): [ 556.420542] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] self.force_reraise() [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise self.value [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] updated_port = self._update_port( [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] _ensure_no_port_binding_failure(port) [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] raise exception.PortBindingFailed(port_id=port['id']) [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] nova.exception.PortBindingFailed: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. [ 556.420984] env[61905]: ERROR nova.compute.manager [instance: 014849e7-a41c-432e-81ae-03725825166e] [ 556.421303] env[61905]: DEBUG nova.compute.utils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 556.421303] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Build of instance 014849e7-a41c-432e-81ae-03725825166e was re-scheduled: Binding failed for port 7273f9ef-bdfa-470b-b60e-a9bfc84f3155, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 556.421303] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 556.421303] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquiring lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.421463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Acquired lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.421498] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.425881] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.707s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.432515] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0bca93c2-d3b0-4125-9b4c-1422a6a7e699 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.451152] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 556.460976] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.462207] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c63753f7-ad07-4cd3-a394-2b90a3606ef0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.477112] env[61905]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 556.477299] env[61905]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61905) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 556.478594] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 556.478916] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating folder: Project (d7e97541d6504c1ba93fd732409a2a92). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.479466] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e068580-ee76-4ec0-a01c-b006628013df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.493761] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Created folder: Project (d7e97541d6504c1ba93fd732409a2a92) in parent group-v289968. [ 556.493761] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating folder: Instances. Parent ref: group-v289973. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 556.496021] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6de7dab4-d13a-442b-8552-cbbfa5b81cd7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.505384] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Created folder: Instances in parent group-v289973. 
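
The DuplicateName fault above is expected rather than fatal: folder creation races with other builds and earlier runs, so the driver treats "already exists" as success and moves on to the Project and Instances folders. A rough sketch of that idempotent create, with injected callables standing in for the vSphere session (illustrative names, not the real nova.virt.vmwareapi.vm_util API):

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName SOAP fault."""

    def create_folder_if_missing(create, lookup, parent_ref, name):
        # create() would invoke Folder.CreateFolder; lookup() would re-read
        # the child entity from the parent. Both are passed in to keep this
        # sketch self-contained.
        try:
            return create(parent_ref, name)
        except DuplicateName:
            # Another request created the folder first (the "Folder already
            # exists" case above); reuse it rather than failing the build.
            return lookup(parent_ref, name)
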
[ 556.505384] env[61905]: DEBUG oslo.service.loopingcall [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.505384] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 556.505384] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fba2e44-c4d1-4fab-8aaa-2861e25b7467 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.529922] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 556.529922] env[61905]: value = "task-1362259" [ 556.529922] env[61905]: _type = "Task" [ 556.529922] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.543077] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362259, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.955992] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.048178] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362259, 'name': CreateVM_Task, 'duration_secs': 0.310796} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.048346] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 557.051471] env[61905]: DEBUG oslo_vmware.service [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8570136e-d23e-4878-a643-4df93d6fa584 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.065115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.065115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.065115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.065115] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfa34ad2-dafe-4778-92a1-6e5f9e6748ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.071787] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 557.071787] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521f6f29-38fd-b0ed-b98f-6e8cfffa5e45" [ 557.071787] env[61905]: _type = "Task" [ 557.071787] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.081560] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521f6f29-38fd-b0ed-b98f-6e8cfffa5e45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.124574] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.392516] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde20d34-e599-418f-94f7-8efc5d9c97bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.405125] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5e9e46-94b3-4c92-a5bf-1b426c053f03 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.420135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquiring lock "362c9148-9f78-4700-9c6f-7fd0eaef4bd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.420425] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "362c9148-9f78-4700-9c6f-7fd0eaef4bd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.447361] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd12568a-5ea1-4804-b4ee-93b7497827e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.459389] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79752084-fd5a-4cc5-a888-15b5f6a57ec2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.475591] env[61905]: DEBUG nova.compute.provider_tree [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.584196] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.584461] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance:
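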
72770472-1b79-4408-b32c-34e56fd27c45] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 557.584695] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.584838] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.585454] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 557.585615] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83c9575c-3c31-4c82-8396-9cc5e6946403 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.610809] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 557.610995] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 557.611815] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413d5138-46ce-41f1-8ea7-9c8a5948b43e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.619835] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a776495-1ae5-4051-8bfc-0b2c57671de5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.628921] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Releasing lock "refresh_cache-014849e7-a41c-432e-81ae-03725825166e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.629159] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 557.629333] env[61905]: DEBUG nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 557.629497] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 557.631313] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 557.631313] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ff4b8d-58dd-37cd-1e8c-d7c1eca3ebcb" [ 557.631313] env[61905]: _type = "Task" [ 557.631313] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.640840] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ff4b8d-58dd-37cd-1e8c-d7c1eca3ebcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.660199] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.979965] env[61905]: DEBUG nova.scheduler.client.report [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.144701] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Preparing fetch location {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 558.144983] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating directory with path [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 558.145250] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77aaf78f-1355-4281-a5b1-8487415c2089 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.163025] env[61905]: DEBUG nova.network.neutron [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.168502] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Created directory with path [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 558.168700] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Fetch image to [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 558.169107] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Downloading image file data 4d166298-c700-4bc6-8f8f-67684a277053 to [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk on the 
data store datastore2 {{(pid=61905) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 558.170796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2afbe7-db5f-42e4-9cd9-aa87a35d74b9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.177853] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6128e4ec-21be-4b1f-a625-5045c30cc311 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.191797] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79651f46-fbff-44ff-ad92-814c6e3b2a54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.228191] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b424d5-94d4-4d7a-8463-3744f3882a41 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.235371] env[61905]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2d8e308e-94fb-423f-b035-cb27e2c3e0ad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.259668] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Downloading image file data 4d166298-c700-4bc6-8f8f-67684a277053 to the data store datastore2 {{(pid=61905) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 558.315215] env[61905]: DEBUG oslo_vmware.rw_handles [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61905) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 558.487071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.061s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.487905] env[61905]: ERROR nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. 
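
As with instance 014849e7 earlier, the traceback below ends in a reschedule rather than a hard failure: the resource claim is aborted, the network is deallocated, and the build is handed back to the scheduler. A simplified sketch of that control flow, with injected callables standing in for the real nova.compute.manager methods (an illustration of the pattern, not the actual code):

    class PortBindingFailed(Exception):
        pass

    def build_with_reschedule(spawn, abort_claim, deallocate_network,
                              reschedule, instance_uuid):
        try:
            spawn()
        except PortBindingFailed as exc:
            # Free what this host claimed first ...
            abort_claim()
            deallocate_network(instance_uuid)
            # ... then ask the scheduler for another host instead of
            # setting the instance to ERROR outright.
            reschedule(instance_uuid, reason=str(exc))
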
[ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Traceback (most recent call last): [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.driver.spawn(context, instance, image_meta, [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] vm_ref = self.build_virtual_machine(instance, [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.487905] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] for vif in network_info: [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self._sync_wrapper(fn, *args, **kwargs) [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.wait() [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self[:] = self._gt.wait() [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self._exit_event.wait() [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] result = hub.switch() [ 558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
558.488374] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return self.greenlet.switch() [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] result = function(*args, **kwargs) [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] return func(*args, **kwargs) [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise e [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] nwinfo = self.network_api.allocate_for_instance( [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] created_port_ids = self._update_ports_for_instance( [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] with excutils.save_and_reraise_exception(): [ 558.489189] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] self.force_reraise() [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise self.value [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] updated_port = self._update_port( [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] _ensure_no_port_binding_failure(port) [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] raise exception.PortBindingFailed(port_id=port['id']) [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] nova.exception.PortBindingFailed: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. [ 558.489627] env[61905]: ERROR nova.compute.manager [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] [ 558.489986] env[61905]: DEBUG nova.compute.utils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 558.490832] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Build of instance 708b8fc5-a919-449a-a8bb-0c0d3a40b952 was re-scheduled: Binding failed for port abe38819-3cdb-4ba8-8355-c44ed7ae00c9, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 558.491326] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 558.491612] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquiring lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.491846] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Acquired lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.491979] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 558.497184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.882s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.499586] env[61905]: INFO nova.compute.claims [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 
tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.545949] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 558.666825] env[61905]: INFO nova.compute.manager [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] [instance: 014849e7-a41c-432e-81ae-03725825166e] Took 1.04 seconds to deallocate network for instance. [ 558.833205] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.958194] env[61905]: DEBUG oslo_vmware.rw_handles [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Completed reading data from the image iterator. {{(pid=61905) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 558.958425] env[61905]: DEBUG oslo_vmware.rw_handles [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 559.107444] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Downloaded image file data 4d166298-c700-4bc6-8f8f-67684a277053 to vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk on the data store datastore2 {{(pid=61905) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 559.109257] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Caching image {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 559.109499] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copying Virtual Disk [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk to [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 559.111149] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b4c7587-5b8e-4d11-9cfe-27e6fd6ccd65 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.123381] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 559.123381] env[61905]: value = "task-1362260" [ 559.123381] env[61905]: _type = "Task" [ 559.123381] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.134898] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.342028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Releasing lock "refresh_cache-708b8fc5-a919-449a-a8bb-0c0d3a40b952" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.342028] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 559.342028] env[61905]: DEBUG nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.342028] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 559.369161] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 559.639136] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362260, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.651242] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquiring lock "105aed8e-4268-4553-9564-1540cb5176dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.653467] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "105aed8e-4268-4553-9564-1540cb5176dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.705922] env[61905]: INFO nova.scheduler.client.report [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Deleted allocations for instance 014849e7-a41c-432e-81ae-03725825166e [ 559.875374] env[61905]: DEBUG nova.network.neutron [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.994102] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa2ada9-1f89-42f2-bef6-544c0f32e165 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.002165] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4d42cf0b-f16c-4e06-bcf7-bf6ca0efd0f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.037592] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b260a34b-a7d4-41c0-9569-e16ced272a9e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.048882] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b29dc47-6695-4046-819a-41ac98ec4851 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.063392] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 560.137955] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726735} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.139609] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copied Virtual Disk [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk to [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.139609] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleting the datastore file [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 560.139609] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1888e7f6-3fed-4c7b-8568-e16981d264c0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.150044] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 560.150044] env[61905]: value = "task-1362261" [ 560.150044] env[61905]: _type = "Task" [ 560.150044] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.158894] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.216568] env[61905]: DEBUG oslo_concurrency.lockutils [None req-af6c9747-a8f1-4e9f-83c6-d7f5bae9d438 tempest-ImagesOneServerTestJSON-2033262084 tempest-ImagesOneServerTestJSON-2033262084-project-member] Lock "014849e7-a41c-432e-81ae-03725825166e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.404s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.377774] env[61905]: INFO nova.compute.manager [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] [instance: 708b8fc5-a919-449a-a8bb-0c0d3a40b952] Took 1.04 seconds to deallocate network for instance. [ 560.475375] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "60f2eb1d-de4c-4318-98c3-eb2d411c120b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.475579] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "60f2eb1d-de4c-4318-98c3-eb2d411c120b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.590628] env[61905]: ERROR nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [req-a9d70fcc-139b-489f-a293-c7516ffda79c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9d70fcc-139b-489f-a293-c7516ffda79c"}]} [ 560.614669] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 560.632359] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 560.632359] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 560.649517] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 560.664837] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033289} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.665286] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.665441] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Moving file from [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2/4d166298-c700-4bc6-8f8f-67684a277053 to [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053. {{(pid=61905) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 560.665753] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-723f9a1d-d6fb-4193-ae56-31285cd74079 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.674383] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 560.674383] env[61905]: value = "task-1362262" [ 560.674383] env[61905]: _type = "Task" [ 560.674383] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.682018] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 560.692570] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362262, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.719560] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 561.092542] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc67f397-66ca-48b2-af93-405351cbb5ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.099929] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2cf93d-2aca-4850-8cff-3e60a9227dde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.133885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8a0654-1013-4b65-81d0-c1703f2e37fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.142409] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c803871b-055f-4e48-9763-51a55804db49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.160914] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 561.184471] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362262, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028017} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.184748] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] File moved {{(pid=61905) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 561.184894] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Cleaning up location [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 561.185063] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleting the datastore file [datastore2] vmware_temp/d08c3b9f-4880-4071-aa1b-f6d7eaffe8f2 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.185311] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d0becb8-83a0-4690-9e0f-e8db048917d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.193106] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 561.193106] env[61905]: value = "task-1362263" [ 561.193106] env[61905]: _type = "Task" [ 561.193106] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.202112] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362263, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.248505] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.429265] env[61905]: INFO nova.scheduler.client.report [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Deleted allocations for instance 708b8fc5-a919-449a-a8bb-0c0d3a40b952 [ 561.687976] env[61905]: ERROR nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [req-36f5de86-e52b-45eb-98da-1ef2e3cf661f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 9cb855ec-212a-457a-a4ff-55e9d97323b7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-36f5de86-e52b-45eb-98da-1ef2e3cf661f"}]} [ 561.706868] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362263, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027415} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.707323] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 561.708088] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2285890-5815-4ff6-bac8-fbcaf5378879 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.714732] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 561.723810] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 561.723810] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52604a32-9671-4417-6829-9d764664dcdc" [ 561.723810] env[61905]: _type = "Task" [ 561.723810] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.733819] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52604a32-9671-4417-6829-9d764664dcdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.734825] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 561.735328] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 150, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 561.755395] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 561.777659] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 561.854620] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquiring lock "ded96da7-74a4-4364-8424-22000411f5fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.854620] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "ded96da7-74a4-4364-8424-22000411f5fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.940926] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-8184bbdc-14c4-4498-82dd-007f554fbcb2 tempest-ServersTestManualDisk-1871751139 tempest-ServersTestManualDisk-1871751139-project-member] Lock "708b8fc5-a919-449a-a8bb-0c0d3a40b952" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.167s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.120136] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69bcf95-cf4d-4729-9603-15a959bcf958 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.129549] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc52e5dc-bc48-47c8-9455-ae8b68ebf284 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.162839] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8614cae5-e578-4a29-abf6-508be1f32c70 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.171175] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee038568-0ec8-4c80-b6dd-a07d521d995b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.186526] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.237172] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52604a32-9671-4417-6829-9d764664dcdc, 'name': SearchDatastore_Task, 'duration_secs': 0.010362} completed successfully. 
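
The acquired/released pairs with their "waited"/"held" timings (40.167s held just above) come from oslo.concurrency's lockutils wrappers, which log both phases around the critical section. A minimal sketch of the two forms visible in this log, assuming oslo.concurrency is installed; the lock names are copied from the log, whereas the real code derives the per-instance name from instance.uuid:

    from oslo_concurrency import lockutils

    def claim_resources():
        print("claiming under compute_resources")

    # Context-manager form: all waiters on the same name serialize, and
    # the wrapper logs how long each one waited for and held the lock.
    with lockutils.lock("compute_resources"):
        claim_resources()

    # Decorator form, comparable to the per-instance build lock above.
    @lockutils.synchronized("708b8fc5-a919-449a-a8bb-0c0d3a40b952")
    def locked_do_build_and_run_instance():
        print("building under the instance lock")

    locked_do_build_and_run_instance()
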
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.237491] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.237772] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 562.238155] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc02d657-b0d7-4ac6-8602-e02ba5fe5d1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.247618] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 562.247618] env[61905]: value = "task-1362264" [ 562.247618] env[61905]: _type = "Task" [ 562.247618] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.256318] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.443721] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 562.725494] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updated inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with generation 40 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 562.725494] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 generation from 40 to 41 during operation: update_inventory {{(pid=61905) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 562.725494] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 562.760151] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494789} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.760331] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 562.760604] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 562.760861] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-571db2c9-77b9-4691-8c13-2ca245edadf4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.768770] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 562.768770] env[61905]: value = "task-1362265" [ 562.768770] env[61905]: _type = "Task" [ 562.768770] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.778024] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.980369] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.230473] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.733s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.230987] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 563.234079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.986s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.281407] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071655} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.285253] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 563.292334] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c48a90-0aa1-4f3f-b6fa-10623867487a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.318318] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 563.318949] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-625bfb51-632d-415a-9f45-f43ea725f5c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.341375] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 563.341375] env[61905]: value = "task-1362266" [ 563.341375] env[61905]: _type = "Task" [ 563.341375] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.350390] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362266, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.737931] env[61905]: DEBUG nova.compute.utils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.748025] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 563.748025] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.842478] env[61905]: DEBUG nova.policy [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cad66f711e2d4972bca5d9e85395570d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21df24c46bdb48eaab7ebae22a10a668', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 563.858676] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362266, 'name': ReconfigVM_Task, 'duration_secs': 0.310331} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.858995] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 563.862903] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c0c551d-7ab3-4dc8-a3ba-b42eb5e8f40c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.871610] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 563.871610] env[61905]: value = "task-1362267" [ 563.871610] env[61905]: _type = "Task" [ 563.871610] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.884381] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362267, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.204134] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a1d2c9-698e-4759-871f-63f149d9802c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.213458] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1e8a1c-119b-49d8-be97-2136562ed1a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.247021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2800a96c-d7c5-4aa3-b83b-508326f7c291 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.249081] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 564.258695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9bd603-1512-4c20-96de-a595d7b72c9f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.274804] env[61905]: DEBUG nova.compute.provider_tree [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.343856] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.344091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.381579] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362267, 'name': Rename_Task, 'duration_secs': 0.161036} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.381859] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 564.383814] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9090bff-2dc2-4adf-ac69-5b7151436982 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.391064] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 564.391064] env[61905]: value = "task-1362268" [ 564.391064] env[61905]: _type = "Task" [ 564.391064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.401378] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.624224] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Successfully created port: d630a755-6b72-4301-b52f-164a6e89ee82 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.783478] env[61905]: DEBUG nova.scheduler.client.report [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.900996] env[61905]: DEBUG oslo_vmware.api [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362268, 'name': PowerOnVM_Task, 'duration_secs': 0.448812} completed successfully. 
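
The "Waiting for the task / progress is N% / completed successfully" triplets that bracket every CopyVirtualDisk_Task, MoveDatastoreFile_Task, Rename_Task and PowerOnVM_Task come from oslo.vmware's wait_for_task, which polls the vSphere task until it reaches a terminal state. An illustrative polling loop over a pyVmomi-style task object (a sketch of the pattern, not oslo.vmware's actual implementation):

    import time

    def wait_for_task(task, poll_interval=0.5):
        """Poll a vSphere task until it succeeds or errors out."""
        while True:
            # TaskInfo carries state ('queued', 'running', 'success',
            # 'error'), an optional progress percentage, and the result.
            info = task.info
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error.localizedMessage)
            print("Task %s progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(poll_interval)
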
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.901405] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 564.901616] env[61905]: INFO nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Took 8.52 seconds to spawn the instance on the hypervisor. [ 564.901927] env[61905]: DEBUG nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 564.902686] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca67e90-3a1f-4980-8de3-c9bfc8cb5c7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.267579] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 565.290817] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.057s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.293161] env[61905]: ERROR nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. 
[ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Traceback (most recent call last): [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.driver.spawn(context, instance, image_meta, [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self._vmops.spawn(context, instance, image_meta, injected_files, [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] vm_ref = self.build_virtual_machine(instance, [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] vif_infos = vmwarevif.get_vif_info(self._session, [ 565.293161] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] for vif in network_info: [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self._sync_wrapper(fn, *args, **kwargs) [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.wait() [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self[:] = self._gt.wait() [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self._exit_event.wait() [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] result = hub.switch() [ 565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
565.293580] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return self.greenlet.switch() [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] result = function(*args, **kwargs) [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] return func(*args, **kwargs) [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise e [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] nwinfo = self.network_api.allocate_for_instance( [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] created_port_ids = self._update_ports_for_instance( [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] with excutils.save_and_reraise_exception(): [ 565.294100] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] self.force_reraise() [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise self.value [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] updated_port = self._update_port( [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] _ensure_no_port_binding_failure(port) [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] raise exception.PortBindingFailed(port_id=port['id']) [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] nova.exception.PortBindingFailed: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. [ 565.294516] env[61905]: ERROR nova.compute.manager [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] [ 565.295124] env[61905]: DEBUG nova.compute.utils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 565.295279] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.190s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.298132] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Build of instance 056d4c90-e763-4488-a349-1adaddfc9e95 was re-scheduled: Binding failed for port e3313851-073d-4cae-bf90-bf32e98b2b28, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 565.298574] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 565.298798] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquiring lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.298944] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Acquired lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.299118] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 565.304810] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 565.304810] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 565.304810] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.305049] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Flavor pref 0:0:0 
{{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 565.305049] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.305049] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 565.305049] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 565.305049] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 565.305227] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 565.305468] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 565.305694] env[61905]: DEBUG nova.virt.hardware [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 565.307031] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069f83ce-72d4-4ca0-882f-37694002c5e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.317132] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cd987c-a647-44df-a8dd-20079a415e76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.422945] env[61905]: INFO nova.compute.manager [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Took 31.82 seconds to build instance. 
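The hardware.py lines above walk the flavor's CPU topology selection: with no flavor or image constraints the per-dimension limits default to 65536, and for the one-vCPU m1.nano flavor the only (sockets, cores, threads) factorization is 1:1:1. A brute-force sketch in the spirit of nova.virt.hardware._get_possible_cpu_topologies (the real code derives candidates rather than scanning the full product space):

from itertools import product


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # flavor's vCPU count, honouring the per-dimension limits.  O(v^3) in
    # the vCPU count, which is fine for a sketch.
    return [(s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus]


# For the m1.nano flavor logged above (vcpus=1) this yields the single
# topology reported: [(1, 1, 1)].
print(possible_cpu_topologies(1))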
[ 565.840977] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.925503] env[61905]: DEBUG oslo_concurrency.lockutils [None req-efb3449b-9576-4390-b828-07f2ae725ea5 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.548s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.027906] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.267463] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f425ae3-281b-47a8-8e8f-6f9f662a196e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.277294] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91caa4ac-d44b-4aef-a499-004b39aeb68c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.310169] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9711e988-4345-4e20-94bb-4607ba224588 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.319498] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63cfaf4-cb2f-4758-b578-cda0172c0f77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.335035] env[61905]: DEBUG nova.compute.provider_tree [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.429039] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.536223] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Releasing lock "refresh_cache-056d4c90-e763-4488-a349-1adaddfc9e95" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.536444] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 566.536613] env[61905]: DEBUG nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 566.536785] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.578733] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Instance cache missing network info.

{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.840032] env[61905]: DEBUG nova.scheduler.client.report [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.967636] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.085672] env[61905]: DEBUG nova.network.neutron [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.262753] env[61905]: ERROR nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. 
[ 567.262753] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.262753] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.262753] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.262753] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.262753] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.262753] env[61905]: ERROR nova.compute.manager raise self.value [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.262753] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 567.262753] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.262753] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 567.263352] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.263352] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 567.263352] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. 
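Every one of these traces passes through oslo_utils.excutils.save_and_reraise_exception, whose force_reraise() / raise self.value frames sit just above the _update_port call. A minimal sketch of that helper and of the cleanup-then-reraise pattern _update_ports_for_instance uses it for; the real class can also suppress the re-raise (reraise=False), which this sketch omits:

import contextlib
import logging
import sys

LOG = logging.getLogger(__name__)


@contextlib.contextmanager
def save_and_reraise_exception():
    # Capture the exception being handled on entry, let the with-body run
    # its cleanup, then re-raise the saved exception on exit.  If the
    # cleanup itself fails, that error is logged and wins instead.
    _, exc_value, exc_tb = sys.exc_info()
    try:
        yield
    except Exception:
        LOG.exception("Cleanup failed; re-raising the cleanup error")
        raise
    if exc_value is not None:
        raise exc_value.with_traceback(exc_tb)


# Usage mirroring the traced code path: on failure, undo the ports
# created so far, then let the original error propagate.
def update_ports(ports, update_port, unbind_ports):
    created = []
    for port in ports:
        try:
            created.append(update_port(port))
        except Exception:
            with save_and_reraise_exception():
                unbind_ports(created)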
[ 567.263352] env[61905]: ERROR nova.compute.manager [ 567.263352] env[61905]: Traceback (most recent call last): [ 567.263352] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 567.263352] env[61905]: listener.cb(fileno) [ 567.263352] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.263352] env[61905]: result = function(*args, **kwargs) [ 567.263352] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.263352] env[61905]: return func(*args, **kwargs) [ 567.263352] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.263352] env[61905]: raise e [ 567.263352] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.263352] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 567.263352] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.263352] env[61905]: created_port_ids = self._update_ports_for_instance( [ 567.263352] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.263352] env[61905]: with excutils.save_and_reraise_exception(): [ 567.263352] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.263352] env[61905]: self.force_reraise() [ 567.263352] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.263352] env[61905]: raise self.value [ 567.263352] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.263352] env[61905]: updated_port = self._update_port( [ 567.263352] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.263352] env[61905]: _ensure_no_port_binding_failure(port) [ 567.263352] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.263352] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 567.264363] env[61905]: nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. [ 567.264363] env[61905]: Removing descriptor: 18 [ 567.264363] env[61905]: ERROR nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. 
[ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Traceback (most recent call last): [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] yield resources [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.driver.spawn(context, instance, image_meta, [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.264363] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] vm_ref = self.build_virtual_machine(instance, [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] for vif in network_info: [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self._sync_wrapper(fn, *args, **kwargs) [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.wait() [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self[:] = self._gt.wait() [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self._exit_event.wait() [ 567.264791] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.265226] env[61905]: ERROR 
nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] result = hub.switch() [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self.greenlet.switch() [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] result = function(*args, **kwargs) [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return func(*args, **kwargs) [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise e [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] nwinfo = self.network_api.allocate_for_instance( [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.265226] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] created_port_ids = self._update_ports_for_instance( [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] with excutils.save_and_reraise_exception(): [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.force_reraise() [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise self.value [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] updated_port = self._update_port( [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.265625] 
env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] _ensure_no_port_binding_failure(port) [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.265625] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise exception.PortBindingFailed(port_id=port['id']) [ 567.266359] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. [ 567.266359] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] [ 567.266359] env[61905]: INFO nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Terminating instance [ 567.272491] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.272697] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquired lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.272924] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.347533] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.050s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 567.347533] env[61905]: ERROR nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. 
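A note on the compute_resources lock bookkeeping in this record (acquired, then released with "held 2.050s"): oslo.concurrency's lockutils wraps the claimed section in a named lock and reports the wait and hold durations. A process-local sketch of that pattern, using print in place of the DEBUG logger; the real helper also offers fair and inter-process file locks, which this omits. (The traceback for the failure logged just above continues below.)

import functools
import threading
import time

_LOCKS: dict[str, threading.Lock] = {}


def synchronized(name):
    # One named lock shared by all callers in this process; wait and hold
    # times are reported the way the lockutils DEBUG lines above are.
    lock = _LOCKS.setdefault(name, threading.Lock())

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            print(f'Acquiring lock "{name}" by "{fn.__qualname__}"')
            start = time.monotonic()
            with lock:
                print(f'Lock "{name}" acquired by "{fn.__qualname__}" '
                      f':: waited {time.monotonic() - start:.3f}s')
                held_from = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    print(f'Lock "{name}" released by "{fn.__qualname__}" '
                          f':: held {time.monotonic() - held_from:.3f}s')
        return wrapper
    return decorator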
[ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Traceback (most recent call last): [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.driver.spawn(context, instance, image_meta, [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.347533] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] vm_ref = self.build_virtual_machine(instance, [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] for vif in network_info: [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self._sync_wrapper(fn, *args, **kwargs) [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.wait() [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self[:] = self._gt.wait() [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self._exit_event.wait() [ 567.347832] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] result = hub.switch() [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return self.greenlet.switch() [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] result = function(*args, **kwargs) [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] return func(*args, **kwargs) [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise e [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] nwinfo = self.network_api.allocate_for_instance( [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 567.348199] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] created_port_ids = self._update_ports_for_instance( [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] with excutils.save_and_reraise_exception(): [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] self.force_reraise() [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise self.value [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] updated_port = self._update_port( [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] _ensure_no_port_binding_failure(port) [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 567.348553] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] raise exception.PortBindingFailed(port_id=port['id']) [ 567.348887] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] nova.exception.PortBindingFailed: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. [ 567.348887] env[61905]: ERROR nova.compute.manager [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] [ 567.348887] env[61905]: DEBUG nova.compute.utils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 567.350249] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.177s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.358021] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Build of instance c4642eab-5ac8-41c4-93a5-8429525f5120 was re-scheduled: Binding failed for port 8055d540-f6ce-4a84-98b8-f088dca855c8, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 567.358021] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 567.358021] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.358021] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquired lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.358359] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.587428] env[61905]: INFO nova.compute.manager [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] [instance: 056d4c90-e763-4488-a349-1adaddfc9e95] Took 1.05 seconds to deallocate network for instance. [ 567.639814] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "2a778ae5-37be-4479-b7ff-4468d0433c86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.639814] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "2a778ae5-37be-4479-b7ff-4468d0433c86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.848262] env[61905]: DEBUG nova.compute.manager [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Received event network-changed-d630a755-6b72-4301-b52f-164a6e89ee82 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 567.848492] env[61905]: DEBUG nova.compute.manager [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Refreshing instance network info cache due to event network-changed-d630a755-6b72-4301-b52f-164a6e89ee82. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 567.848928] env[61905]: DEBUG oslo_concurrency.lockutils [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] Acquiring lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.860179] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.941867] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.125118] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.207048] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.278394] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd241b72-f76b-4523-997e-b0d9251f543a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.288782] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f7596-b7ea-4701-b610-230769263943 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.321991] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4b63e3-c69c-4bd5-8a92-d15373388f58 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.330647] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebefaa0-e196-4c7f-abab-8ab954204f35 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.344753] env[61905]: DEBUG nova.compute.provider_tree [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.361256] env[61905]: INFO nova.compute.manager [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 
tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Rebuilding instance [ 568.402972] env[61905]: DEBUG nova.compute.manager [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 568.404051] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82e9317-5e5f-40ae-9b1a-96cbdbae859b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.631163] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Releasing lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.631782] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.631876] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 568.632228] env[61905]: DEBUG oslo_concurrency.lockutils [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] Acquired lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.633586] env[61905]: DEBUG nova.network.neutron [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Refreshing network info cache for port d630a755-6b72-4301-b52f-164a6e89ee82 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.633586] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bfbce85-556f-401a-9624-f2ae8d4b8fdf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.639259] env[61905]: INFO nova.scheduler.client.report [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Deleted allocations for instance 056d4c90-e763-4488-a349-1adaddfc9e95 [ 568.659020] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3963a40f-88dd-4277-93c5-8e9bebedbec7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.685605] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 
tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 70c8f43f-28f0-4097-a8cb-37f6654ec014 could not be found. [ 568.685902] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 568.686114] env[61905]: INFO nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Took 0.05 seconds to destroy the instance on the hypervisor. [ 568.686453] env[61905]: DEBUG oslo.service.loopingcall [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.686550] env[61905]: DEBUG nova.compute.manager [-] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.686685] env[61905]: DEBUG nova.network.neutron [-] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 568.717793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Releasing lock "refresh_cache-c4642eab-5ac8-41c4-93a5-8429525f5120" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.718121] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 568.720612] env[61905]: DEBUG nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.720612] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 568.737593] env[61905]: DEBUG nova.network.neutron [-] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance cache missing network info.
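
Annotation: the oslo.service.loopingcall entry at 568.686453 shows the compute manager parking on a nested retry helper (_deallocate_network_with_retries) rather than calling Neutron once. Below is a minimal standalone sketch of that retry-until-success shape; the function name, attempt count, and delay are illustrative, not Nova's actual values:

```python
import time

def deallocate_with_retries(deallocate, attempts=3, delay=1.0):
    """Call deallocate() until it succeeds or the retry budget is spent."""
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:  # a production wrapper would be more selective
            if attempt == attempts:
                raise  # out of retries: let the last error propagate
            time.sleep(delay)  # back off briefly, then try again

# Usage: a deallocation that only succeeds on the second attempt.
calls = {"n": 0}
def flaky_deallocate():
    calls["n"] += 1
    if calls["n"] < 2:
        raise RuntimeError("neutron not ready")
    return "deallocated"

print(deallocate_with_retries(flaky_deallocate, delay=0.0))  # -> deallocated
```

Swallowing and retrying keeps a transient Neutron hiccup from leaking allocations; only the final failure propagates to the caller.
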
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.763104] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.848536] env[61905]: DEBUG nova.scheduler.client.report [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.918164] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 568.918164] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4bdf8ed-ff73-4190-9f60-d5884f4d08dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.930241] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 568.930241] env[61905]: value = "task-1362269" [ 568.930241] env[61905]: _type = "Task" [ 568.930241] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.942316] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362269, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.168690] env[61905]: DEBUG nova.network.neutron [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance cache missing network info. 
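
Annotation: the report-client entry at 568.848536 carries the provider's full inventory. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, while max_unit still caps any single allocation. A quick check of that arithmetic against the logged record (values copied verbatim):

```python
# Inventory as logged for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 149},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:.0f} schedulable, at most {inv['max_unit']} per allocation")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```
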
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 569.171223] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e4c275b-7dca-4402-ba8d-59867fa7f7a9 tempest-ServersAdminNegativeTestJSON-1302189117 tempest-ServersAdminNegativeTestJSON-1302189117-project-member] Lock "056d4c90-e763-4488-a349-1adaddfc9e95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 45.050s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.244079] env[61905]: DEBUG nova.network.neutron [-] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.268332] env[61905]: DEBUG nova.network.neutron [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.339786] env[61905]: DEBUG nova.network.neutron [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.356542] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.003s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.356542] env[61905]: ERROR nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information.
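
Annotation: the PortBindingFailed at 569.356542 is raised by the check at the bottom of the traceback reproduced below, which inspects the binding:vif_type Neutron handed back for the port. A hedged reconstruction of that check (simplified: the real helper and exception live in nova/network/neutron.py and nova/exception.py and use Nova's VIF-type constants):

```python
VIF_TYPE_BINDING_FAILED = "binding_failed"  # Neutron's marker for a failed bind

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that binding the port to a host failed."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

# The port behind the error above would have come back looking like this:
port = {"id": "55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd",
        "binding:vif_type": "binding_failed"}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```
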
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Traceback (most recent call last):
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.driver.spawn(context, instance, image_meta,
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 569.356542] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] vm_ref = self.build_virtual_machine(instance,
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] vif_infos = vmwarevif.get_vif_info(self._session,
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] for vif in network_info:
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self._sync_wrapper(fn, *args, **kwargs)
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.wait()
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self[:] = self._gt.wait()
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self._exit_event.wait()
[ 569.356953] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] result = hub.switch()
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return self.greenlet.switch()
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] result = function(*args, **kwargs)
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] return func(*args, **kwargs)
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise e
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] nwinfo = self.network_api.allocate_for_instance(
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 569.357362] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] created_port_ids = self._update_ports_for_instance(
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] with excutils.save_and_reraise_exception():
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] self.force_reraise()
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise self.value
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] updated_port = self._update_port(
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] _ensure_no_port_binding_failure(port)
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 569.357805] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] raise exception.PortBindingFailed(port_id=port['id'])
[ 569.358166] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] nova.exception.PortBindingFailed: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information.
[ 569.358166] env[61905]: ERROR nova.compute.manager [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df]
[ 569.358166] env[61905]: DEBUG nova.compute.utils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 569.363326] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Build of instance d3ec2174-203c-43c6-9ecc-0a0d42fc35df was re-scheduled: Binding failed for port 55f4c076-6d0a-4b8a-ab49-a1e1b61c6edd, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 569.363326] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 569.363326] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquiring lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.363326] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Acquired lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.363561] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 569.363561] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.779s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.364909] env[61905]: INFO
nova.compute.claims [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.443041] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362269, 'name': PowerOffVM_Task, 'duration_secs': 0.197236} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.443445] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 569.443445] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 569.444337] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b6b185-b063-4c19-a9dc-44c9ad00a5da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.456160] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 569.456430] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b802a01-d15e-4b7d-8d56-2b3faefe3a17 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.490691] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 569.490933] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 569.491107] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleting the datastore file [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 569.491363] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94716a61-ba5b-4e7a-a9ad-3bbb7d0b4d6a {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.500727] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 569.500727] env[61905]: value = "task-1362271" [ 569.500727] env[61905]: _type = "Task" [ 569.500727] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.512363] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.679949] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 569.750324] env[61905]: INFO nova.compute.manager [-] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Took 1.06 seconds to deallocate network for instance. [ 569.753748] env[61905]: DEBUG nova.compute.claims [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 569.753935] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.772073] env[61905]: INFO nova.compute.manager [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: c4642eab-5ac8-41c4-93a5-8429525f5120] Took 1.05 seconds to deallocate network for instance. [ 569.844297] env[61905]: DEBUG oslo_concurrency.lockutils [req-78f33cf6-1ef4-42b1-bdc7-d9efbb22dfcb req-4af29378-39c8-45b6-9d1d-0010c787655f service nova] Releasing lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.921521] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.016987] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100158} completed successfully. 
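
Annotation: entries 569.443041 through 570.016987 trace the rebuild teardown for instance 72770472: power off the VM, unregister it from vCenter, then delete its datastore directory, each asynchronous step being a vCenter task the caller waits on. A schematic of that ordering with a stubbed session; the method routing here is illustrative, the real calls go through oslo.vmware:

```python
class FakeSession:
    """Stand-in for a vCenter session; real calls go through oslo.vmware."""
    def invoke(self, method, target):
        print(f"invoke {method} on {target}")
        return f"task-for-{method}"
    def wait_for_task(self, task):
        print(f"wait on {task}")

def destroy_vm(session, vm_ref, ds_path):
    # Order mirrors the trace: power off, unregister, delete the files.
    session.wait_for_task(session.invoke("PowerOffVM_Task", vm_ref))
    session.invoke("UnregisterVM", vm_ref)  # synchronous, no task to wait on
    session.wait_for_task(session.invoke("DeleteDatastoreFile_Task", ds_path))

destroy_vm(FakeSession(), "vm-72770472",
           "[datastore2] 72770472-1b79-4408-b32c-34e56fd27c45")
```
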
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.017836] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 570.017836] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 570.017836] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 570.086999] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.154049] env[61905]: DEBUG nova.compute.manager [req-320b2b50-aada-4bd0-9858-8325c08a2b20 req-59173109-fe2c-473d-89db-c7a5c8140279 service nova] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Received event network-vif-deleted-d630a755-6b72-4301-b52f-164a6e89ee82 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 570.214619] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.592668] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Releasing lock "refresh_cache-d3ec2174-203c-43c6-9ecc-0a0d42fc35df" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.592897] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 570.593089] env[61905]: DEBUG nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 570.593257] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 570.813546] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.818502] env[61905]: INFO nova.scheduler.client.report [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Deleted allocations for instance c4642eab-5ac8-41c4-93a5-8429525f5120 [ 570.853986] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2821eae8-2135-42b2-a28d-7039f96be482 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.864180] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2410272a-7c62-4638-a4f7-5d5e36286667 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.901306] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17161f88-a319-491e-9a31-e4ba10348267 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.909560] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd90ba6-ddbe-4a37-9d48-b3ef93b48586 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.924995] env[61905]: DEBUG nova.compute.provider_tree [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.063193] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.063445] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.063599] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.066790] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.066790] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.066790] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.066969] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.067123] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.067275] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.067602] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.067857] env[61905]: DEBUG nova.virt.hardware [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 
tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.068758] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e34ea72-b3cf-4c78-b385-c9a6c4b721a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.079991] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43b3389-2def-4aaa-9c75-e6728631c186 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.097317] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.104020] env[61905]: DEBUG oslo.service.loopingcall [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.104317] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 571.104654] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f73f84e-53fb-45e5-befb-d2eec076667a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.126803] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.126803] env[61905]: value = "task-1362272" [ 571.126803] env[61905]: _type = "Task" [ 571.126803] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.138275] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362272, 'name': CreateVM_Task} progress is 0%. 
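
Annotation: the nova.virt.hardware entries above walk from the m1.nano flavor (1 vCPU, no topology limits, so the 65536 caps apply) to a single surviving topology of 1 socket, 1 core, 1 thread. An illustrative enumeration of that search; Nova's _get_possible_cpu_topologies additionally applies preference ordering, which a single candidate makes moot here:

```python
def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) triples that multiply to vcpus."""
    topologies = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topologies.append((s, c, t))
    return topologies

# 1 vCPU with the default 65536 caps leaves exactly one topology,
# matching the logged result [VirtCPUTopology(cores=1,sockets=1,threads=1)]:
print(possible_cpu_topologies(1, 65536, 65536, 65536))  # -> [(1, 1, 1)]
```
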
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.327826] env[61905]: DEBUG nova.network.neutron [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.340645] env[61905]: DEBUG oslo_concurrency.lockutils [None req-345b8f39-ea11-4e7b-a589-ef7f5e0445fe tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "c4642eab-5ac8-41c4-93a5-8429525f5120" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 45.642s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.429694] env[61905]: DEBUG nova.scheduler.client.report [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.644432] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362272, 'name': CreateVM_Task, 'duration_secs': 0.271506} completed successfully.
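
Annotation: every *_Task invocation above hands back a task handle that the client polls until completion; the log shows the cadence (task-1362272 at "progress is 0%", then "completed successfully" with duration_secs). A minimal polling loop of the same shape; the states, interval, and timeout are illustrative rather than oslo.vmware's actual internals:

```python
import time

def wait_for_task(poll, interval=0.5, timeout=60.0):
    """Poll poll() until the task finishes; poll returns (state, progress)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()
        print(f"progress is {progress}%")       # matches the log cadence
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")   # real code raises a typed error
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

# Usage: a fake CreateVM-style task that completes on the third poll.
states = iter([("running", 0), ("running", 50), ("success", 100)])
wait_for_task(lambda: next(states), interval=0.0)
```
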
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.644605] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 571.645021] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.645224] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.645479] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 571.645750] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1afc16f-c1a2-4a7e-aded-371af44a8e55 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.653170] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 571.653170] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524328e7-3bc6-d8ea-41af-b76dd151a7a7" [ 571.653170] env[61905]: _type = "Task" [ 571.653170] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.662834] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524328e7-3bc6-d8ea-41af-b76dd151a7a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.830922] env[61905]: INFO nova.compute.manager [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] [instance: d3ec2174-203c-43c6-9ecc-0a0d42fc35df] Took 1.24 seconds to deallocate network for instance. [ 571.841930] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Starting instance... 
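
Annotation: the lock names at 571.645021 show the image-cache path itself ("[datastore2] devstack-image-cache_base/4d166298-...") used as the lock key, so concurrent work on the same cached image serializes while different images proceed in parallel. A sketch of per-key serialization with oslo.concurrency (present in this venv); the body is a stand-in, and the real code additionally takes the external semaphore seen in the log:

```python
from oslo_concurrency import lockutils  # the library behind the lock lines above

def fetch_image_once(image_id, datastore="datastore2"):
    """Serialize work on one cached image; distinct images do not block."""
    cache_path = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(cache_path):
        # Inside the lock: check the cache and download only on a miss.
        # (Stand-in body; the real code searches the datastore first.)
        print(f"holding lock on {cache_path}")

fetch_image_once("4d166298-c700-4bc6-8f8f-67684a277053")
```
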
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 571.934861] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.935366] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 571.938184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.930s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.939654] env[61905]: INFO nova.compute.claims [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.140373] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "03f9b48c-4bd1-4018-b34f-267e1575c753" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.140510] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "03f9b48c-4bd1-4018-b34f-267e1575c753" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.177532] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524328e7-3bc6-d8ea-41af-b76dd151a7a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009761} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.177532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.177532] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.177532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.177869] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.177869] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.177869] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5d34d30-cae4-485a-92c1-93dc08421881 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.189302] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.189514] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Folder [datastore2] devstack-image-cache_base created. 
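
Annotation: the ds_util entries at 572.177869 through 572.189514 are an idempotent ensure-exists: the directory creation is issued unconditionally and an already-exists outcome counts as success, so racing builders converge on the same cache folder. The same pattern on a local filesystem (the datastore call and fault names differ in the real driver):

```python
import os

def ensure_cache_dir(path):
    """Create the cache directory if missing; already existing is success."""
    try:
        os.makedirs(path)
        print(f"created {path}")
    except FileExistsError:
        print(f"{path} already present")

ensure_cache_dir("/tmp/devstack-image-cache_base")
ensure_cache_dir("/tmp/devstack-image-cache_base")  # second call is a no-op
```
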
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.191017] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5a9a214-1402-402f-9289-b396db29f95a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.196883] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 572.196883] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52776764-6f17-5536-0f3c-674fb3dc3c91" [ 572.196883] env[61905]: _type = "Task" [ 572.196883] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.208604] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52776764-6f17-5536-0f3c-674fb3dc3c91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.384047] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.446671] env[61905]: DEBUG nova.compute.utils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.455325] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 572.455495] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 572.715187] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52776764-6f17-5536-0f3c-674fb3dc3c91, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.716727] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4f24d96-3e26-47a9-9725-0ee698f0e1dd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.723806] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 572.723806] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203ddbf-9a82-6df7-aebc-d27fa7d4b530" [ 572.723806] env[61905]: _type = "Task" [ 572.723806] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.735864] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203ddbf-9a82-6df7-aebc-d27fa7d4b530, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.792196] env[61905]: DEBUG nova.policy [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dca9de21c31045f889fde3192cf6163e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4b037a2a3b84f6b97af049899db7935', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 572.885486] env[61905]: INFO nova.scheduler.client.report [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Deleted allocations for instance d3ec2174-203c-43c6-9ecc-0a0d42fc35df [ 572.964352] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 573.243219] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203ddbf-9a82-6df7-aebc-d27fa7d4b530, 'name': SearchDatastore_Task, 'duration_secs': 0.012238} completed successfully. 
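
Annotation: the nova.policy entry at 572.792196 prints the exact credential dict evaluated for network:attach_external_network; with is_admin False and roles ['reader', 'member'] the check fails, and the build simply proceeds without external-network privileges. A toy evaluator for a rule of that shape; the real engine is oslo.policy, and the admin-only rule assumed here is illustrative, not taken from the deployment:

```python
def check_role_rule(required_roles, creds, admin_ok=True):
    """Pass if the caller is admin (when allowed) or holds a required role."""
    if admin_ok and creds.get("is_admin"):
        return True
    return any(role in creds.get("roles", []) for role in required_roles)

creds = {  # trimmed from the credential dict in the log
    "is_admin": False,
    "roles": ["reader", "member"],
    "project_id": "c4b037a2a3b84f6b97af049899db7935",
}
# Suppose attach_external_network demands admin (an empty role list here):
print(check_role_rule([], creds))  # False -> the check fails, as logged
```
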
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.247222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.247222] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.249074] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a7210c5-5c03-48f2-8f69-731228dfd411 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.255888] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 573.255888] env[61905]: value = "task-1362273" [ 573.255888] env[61905]: _type = "Task" [ 573.255888] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.275116] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362273, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.363133] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c043a068-e8a9-4f40-b419-42695fd96d8e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.371977] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66317045-c769-4ed1-9bed-7ad5fc5c66dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.407808] env[61905]: DEBUG oslo_concurrency.lockutils [None req-da1549b7-ef8b-432f-930a-727358c6a15e tempest-ServerRescueTestJSONUnderV235-1481544633 tempest-ServerRescueTestJSONUnderV235-1481544633-project-member] Lock "d3ec2174-203c-43c6-9ecc-0a0d42fc35df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.719s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.412423] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90378bb8-0e4a-44ec-9762-eecb974d12fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.419792] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734d40a1-bed3-401f-9f83-1c25c048b1f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.437018] env[61905]: DEBUG nova.compute.provider_tree [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.616521] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Successfully created port: e483ef2f-6b51-4fda-bd1d-68909acd61e1 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.718963] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "84428003-72b1-467a-baf5-06ac37205622" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.719279] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.769888] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 
tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362273, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.914597] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 573.943486] env[61905]: DEBUG nova.scheduler.client.report [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.981891] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 574.011806] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 574.013147] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 574.013147] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 574.013147] env[61905]: DEBUG 
nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 574.013147] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 574.013147] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 574.013368] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 574.013368] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 574.013368] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 574.013567] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 574.013862] env[61905]: DEBUG nova.virt.hardware [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 574.014800] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a58abab-391d-4233-ad06-f1034f7af501 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.026285] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9657b37c-1175-4c34-bcaf-b39668ad9c36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.036020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] 
Acquiring lock "7ae6338f-289f-415a-b261-3be2f9948572" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.036375] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "7ae6338f-289f-415a-b261-3be2f9948572" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.271150] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362273, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515832} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.271150] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.271150] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.271150] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acae01a0-306b-4b4d-b957-d5c5bcb66d10 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.280556] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 574.280556] env[61905]: value = "task-1362274" [ 574.280556] env[61905]: _type = "Task" [ 574.280556] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.290996] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362274, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.450581] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.451123] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 574.455881] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.974s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.457271] env[61905]: INFO nova.compute.claims [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.462882] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.797562] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067775} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.797562] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 574.797562] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ea70dc-e9f0-4746-ad85-7817ecf4e1d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.822087] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 574.822457] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-392c5a53-6ef3-45d5-8f58-d2c4d70e1347 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.847038] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 574.847038] env[61905]: value = "task-1362275" [ 574.847038] env[61905]: _type = "Task" [ 574.847038] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.857145] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362275, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.965573] env[61905]: DEBUG nova.compute.utils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 574.973299] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 574.973487] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 575.127782] env[61905]: DEBUG nova.policy [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '759a45cc386b4baeb0848cb6c2c62927', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38a62ac305aa4da2b9d760ef38fcdea5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 575.366384] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362275, 'name': ReconfigVM_Task, 'duration_secs': 0.280173} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.369145] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 575.369145] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86f91a74-018a-4b62-8c8a-eddb6f7896d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.377408] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 575.377408] env[61905]: value = "task-1362276" [ 575.377408] env[61905]: _type = "Task" [ 575.377408] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.386071] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362276, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.474145] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 575.891526] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362276, 'name': Rename_Task, 'duration_secs': 0.156696} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.891829] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 575.892125] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18624583-9cc3-4629-81e5-2ceae6831bb8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.906371] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 575.906371] env[61905]: value = "task-1362277" [ 575.906371] env[61905]: _type = "Task" [ 575.906371] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.920713] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362277, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.950746] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057fcd19-1a1b-4a8e-bfc8-f5b184a0e011 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.960384] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b7eeba-2704-4512-bb92-7a0588b35293 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.998656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390b20dc-22f7-4411-aef8-5e5c1b85350c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.008490] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda8d2a4-fdc7-4e23-b425-a41a8562ac19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.026405] env[61905]: DEBUG nova.compute.provider_tree [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.420589] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362277, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.472641] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Successfully created port: c01e0a2a-db66-4e6a-b17f-43cba56f21b6 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 576.506083] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 576.528215] env[61905]: DEBUG nova.scheduler.client.report [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.549601] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:13:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='933266694',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1784665331',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 576.550377] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 576.550526] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.550775] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 576.550965] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.551189] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 576.552348] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 576.552348] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 576.552348] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 576.552348] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 576.552348] env[61905]: DEBUG nova.virt.hardware [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 576.556025] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc52a95-7ab1-405c-a1b1-5eb1bf8dad1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.565922] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db34e7db-7038-4bf7-b555-a73fe5855842 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.721112] env[61905]: DEBUG nova.compute.manager [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Received event network-changed-e483ef2f-6b51-4fda-bd1d-68909acd61e1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.721248] env[61905]: DEBUG nova.compute.manager [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Refreshing instance network info cache due to event network-changed-e483ef2f-6b51-4fda-bd1d-68909acd61e1. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 576.721474] env[61905]: DEBUG oslo_concurrency.lockutils [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] Acquiring lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.721609] env[61905]: DEBUG oslo_concurrency.lockutils [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] Acquired lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.721760] env[61905]: DEBUG nova.network.neutron [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Refreshing network info cache for port e483ef2f-6b51-4fda-bd1d-68909acd61e1 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.922182] env[61905]: DEBUG oslo_vmware.api [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362277, 'name': PowerOnVM_Task, 'duration_secs': 0.677461} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.922182] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 576.922182] env[61905]: DEBUG nova.compute.manager [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 576.923419] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce52c1d8-62ea-4761-97c9-479e6b0e8d35 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.036022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.036022] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 577.037315] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.789s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.039834] env[61905]: INFO nova.compute.claims [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.318492] env[61905]: DEBUG nova.network.neutron [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 577.451878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.461424] env[61905]: ERROR nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. 
[ 577.461424] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.461424] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.461424] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.461424] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.461424] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.461424] env[61905]: ERROR nova.compute.manager raise self.value [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.461424] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 577.461424] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.461424] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 577.461913] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.461913] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 577.461913] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. 
[ 577.461913] env[61905]: ERROR nova.compute.manager [ 577.461913] env[61905]: Traceback (most recent call last): [ 577.461913] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 577.461913] env[61905]: listener.cb(fileno) [ 577.461913] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.461913] env[61905]: result = function(*args, **kwargs) [ 577.461913] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.461913] env[61905]: return func(*args, **kwargs) [ 577.461913] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.461913] env[61905]: raise e [ 577.461913] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.461913] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 577.461913] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.461913] env[61905]: created_port_ids = self._update_ports_for_instance( [ 577.461913] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.461913] env[61905]: with excutils.save_and_reraise_exception(): [ 577.461913] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.461913] env[61905]: self.force_reraise() [ 577.461913] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.461913] env[61905]: raise self.value [ 577.461913] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.461913] env[61905]: updated_port = self._update_port( [ 577.461913] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.461913] env[61905]: _ensure_no_port_binding_failure(port) [ 577.461913] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.461913] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 577.462714] env[61905]: nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. [ 577.462714] env[61905]: Removing descriptor: 18 [ 577.462714] env[61905]: ERROR nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. 
[ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Traceback (most recent call last): [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] yield resources [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.driver.spawn(context, instance, image_meta, [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self._vmops.spawn(context, instance, image_meta, injected_files, [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 577.462714] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] vm_ref = self.build_virtual_machine(instance, [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] vif_infos = vmwarevif.get_vif_info(self._session, [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] for vif in network_info: [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self._sync_wrapper(fn, *args, **kwargs) [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.wait() [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self[:] = self._gt.wait() [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self._exit_event.wait() [ 577.463119] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 577.463596] env[61905]: ERROR 
nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] result = hub.switch() [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self.greenlet.switch() [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] result = function(*args, **kwargs) [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return func(*args, **kwargs) [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise e [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] nwinfo = self.network_api.allocate_for_instance( [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 577.463596] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] created_port_ids = self._update_ports_for_instance( [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] with excutils.save_and_reraise_exception(): [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.force_reraise() [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise self.value [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] updated_port = self._update_port( [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 577.464047] 
env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] _ensure_no_port_binding_failure(port) [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 577.464047] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise exception.PortBindingFailed(port_id=port['id']) [ 577.464387] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. [ 577.464387] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] [ 577.464387] env[61905]: INFO nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Terminating instance [ 577.464994] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquiring lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.545214] env[61905]: DEBUG nova.compute.utils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.549682] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 577.549825] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 577.556612] env[61905]: DEBUG nova.network.neutron [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.668473] env[61905]: DEBUG nova.policy [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f5d62c68637455498751b44cd324829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '427f61a90d444368978531f62cae9e05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 578.057308] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 578.060924] env[61905]: DEBUG oslo_concurrency.lockutils [req-6e1174cd-1280-4311-befd-c467f055f7b2 req-ab82d2b9-ae94-466c-b201-8b767eb033b7 service nova] Releasing lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.063292] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquired lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.063292] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.448746] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "7e393163-cd68-4de2-8051-7ec10415e508" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.449199] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "7e393163-cd68-4de2-8051-7ec10415e508" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.581588] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Successfully created port: 6463419a-5ed8-4493-9d1c-150d40412efc {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 578.627167] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.702263] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e488d64-02d0-427a-bc50-d5a4387dbad5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.711413] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba9601-e082-4936-a33e-b1020dd22df7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.754308] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5485609-8fc8-44a8-9d06-f61c74375c32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.768224] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31510e75-9f2d-4e40-8e23-6eccc2cb0027 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.793925] env[61905]: DEBUG nova.compute.provider_tree [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.814320] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.076542] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 579.113743] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 579.114047] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 579.114230] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.114546] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 579.114621] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.114799] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 579.116092] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 579.116598] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 579.116957] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 
tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 579.117037] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 579.117285] env[61905]: DEBUG nova.virt.hardware [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 579.121786] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf4b9e2-3200-4380-aaec-ad1a0b531aa7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.139545] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ae0bf5-ac07-4c7f-8bc2-0a2a88824cd6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.297110] env[61905]: DEBUG nova.scheduler.client.report [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.322462] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Releasing lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.322462] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 579.322462] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 579.322462] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5ceef98-b03e-46d2-aef5-239f4e899671 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.336089] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb283bb1-ffd8-4156-be04-32f9449903d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.368145] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46ce0987-e757-4ec7-9f85-bd84e50f2324 could not be found. [ 579.368387] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 579.368564] env[61905]: INFO nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Took 0.05 seconds to destroy the instance on the hypervisor. [ 579.368904] env[61905]: DEBUG oslo.service.loopingcall [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 579.369189] env[61905]: DEBUG nova.compute.manager [-] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 579.369282] env[61905]: DEBUG nova.network.neutron [-] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 579.412303] env[61905]: DEBUG nova.network.neutron [-] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 579.551699] env[61905]: DEBUG nova.compute.manager [req-4c33e560-4602-4859-b618-3836d5f572ea req-17987548-675e-4ef8-90fe-c5f4bbc22305 service nova] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Received event network-vif-deleted-e483ef2f-6b51-4fda-bd1d-68909acd61e1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 579.687068] env[61905]: INFO nova.compute.manager [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Rebuilding instance [ 579.741793] env[61905]: DEBUG nova.compute.manager [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 579.742904] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17750448-3266-456a-bbd1-f1734ba07ca9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.804280] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.807430] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 579.808563] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.828s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.810254] env[61905]: INFO nova.compute.claims [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.821050] env[61905]: ERROR nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. 
[ 579.821050] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 579.821050] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.821050] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.821050] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.821050] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.821050] env[61905]: ERROR nova.compute.manager raise self.value [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.821050] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 579.821050] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.821050] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 579.822089] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.822089] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 579.822089] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. 
[ 579.822089] env[61905]: ERROR nova.compute.manager [ 579.822089] env[61905]: Traceback (most recent call last): [ 579.822089] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 579.822089] env[61905]: listener.cb(fileno) [ 579.822089] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.822089] env[61905]: result = function(*args, **kwargs) [ 579.822089] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.822089] env[61905]: return func(*args, **kwargs) [ 579.822089] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 579.822089] env[61905]: raise e [ 579.822089] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 579.822089] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 579.822089] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.822089] env[61905]: created_port_ids = self._update_ports_for_instance( [ 579.822089] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.822089] env[61905]: with excutils.save_and_reraise_exception(): [ 579.822089] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.822089] env[61905]: self.force_reraise() [ 579.822089] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.822089] env[61905]: raise self.value [ 579.822089] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.822089] env[61905]: updated_port = self._update_port( [ 579.822089] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.822089] env[61905]: _ensure_no_port_binding_failure(port) [ 579.822089] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.822089] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 579.822988] env[61905]: nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. [ 579.822988] env[61905]: Removing descriptor: 17 [ 579.823844] env[61905]: ERROR nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. 
[ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Traceback (most recent call last): [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] yield resources [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.driver.spawn(context, instance, image_meta, [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] vm_ref = self.build_virtual_machine(instance, [ 579.823844] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] for vif in network_info: [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self._sync_wrapper(fn, *args, **kwargs) [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.wait() [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self[:] = self._gt.wait() [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self._exit_event.wait() [ 579.824505] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 579.824505] env[61905]: ERROR 
nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] result = hub.switch() [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self.greenlet.switch() [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] result = function(*args, **kwargs) [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return func(*args, **kwargs) [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise e [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] nwinfo = self.network_api.allocate_for_instance( [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] created_port_ids = self._update_ports_for_instance( [ 579.824922] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] with excutils.save_and_reraise_exception(): [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.force_reraise() [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise self.value [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] updated_port = self._update_port( [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 579.825411] 
env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] _ensure_no_port_binding_failure(port) [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise exception.PortBindingFailed(port_id=port['id']) [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. [ 579.825411] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] [ 579.825900] env[61905]: INFO nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Terminating instance [ 579.826455] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquiring lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.826628] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquired lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.826791] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 579.916557] env[61905]: DEBUG nova.network.neutron [-] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.255014] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 580.255348] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a053f4b-7808-4670-804c-2106aa2046bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.263451] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){ [ 580.263451] env[61905]: value = "task-1362281" [ 580.263451] env[61905]: _type = "Task" [ 580.263451] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.275480] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.315573] env[61905]: DEBUG nova.compute.utils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.319762] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 580.319944] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 580.359458] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.418970] env[61905]: INFO nova.compute.manager [-] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Took 1.05 seconds to deallocate network for instance. 
[ 580.421349] env[61905]: DEBUG nova.compute.claims [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 580.421518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.485748] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.598109] env[61905]: DEBUG nova.policy [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f980f6592144fa3b79b520da7dda1dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91b4ea1dfb4749388b70f220819292e7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.778024] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362281, 'name': PowerOffVM_Task, 'duration_secs': 0.122616} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.778024] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 580.778024] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.778024] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3987f222-8cce-4a74-9a30-a2135f6a921b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.785717] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 580.785996] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01c91796-d7e7-475c-bdba-7719dd657c4c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.814239] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 580.816038] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 580.816038] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Deleting the datastore file [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.816038] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8bec43ec-3c90-4a5b-99f3-2081a5303e92 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.821376] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 580.829219] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){ [ 580.829219] env[61905]: value = "task-1362283" [ 580.829219] env[61905]: _type = "Task" [ 580.829219] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.840475] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.925618] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquiring lock "c7e66b30-f72d-4afd-aded-4a92dd19b388" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.925618] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "c7e66b30-f72d-4afd-aded-4a92dd19b388" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.973766] env[61905]: ERROR nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. 
[ 580.973766] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.973766] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.973766] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.973766] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.973766] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.973766] env[61905]: ERROR nova.compute.manager raise self.value [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.973766] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 580.973766] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.973766] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 580.974308] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.974308] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 580.974308] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. 
[ 580.974308] env[61905]: ERROR nova.compute.manager [ 580.974308] env[61905]: Traceback (most recent call last): [ 580.974308] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 580.974308] env[61905]: listener.cb(fileno) [ 580.974308] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.974308] env[61905]: result = function(*args, **kwargs) [ 580.974308] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.974308] env[61905]: return func(*args, **kwargs) [ 580.974308] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.974308] env[61905]: raise e [ 580.974308] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.974308] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 580.974308] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.974308] env[61905]: created_port_ids = self._update_ports_for_instance( [ 580.974308] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.974308] env[61905]: with excutils.save_and_reraise_exception(): [ 580.974308] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.974308] env[61905]: self.force_reraise() [ 580.974308] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.974308] env[61905]: raise self.value [ 580.974308] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.974308] env[61905]: updated_port = self._update_port( [ 580.974308] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.974308] env[61905]: _ensure_no_port_binding_failure(port) [ 580.974308] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.974308] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 580.975184] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. [ 580.975184] env[61905]: Removing descriptor: 18 [ 580.975184] env[61905]: ERROR nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. 
[ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Traceback (most recent call last): [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] yield resources [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.driver.spawn(context, instance, image_meta, [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 580.975184] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] vm_ref = self.build_virtual_machine(instance, [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] vif_infos = vmwarevif.get_vif_info(self._session, [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] for vif in network_info: [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self._sync_wrapper(fn, *args, **kwargs) [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.wait() [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self[:] = self._gt.wait() [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self._exit_event.wait() [ 580.975615] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 580.976122] env[61905]: ERROR 
nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] result = hub.switch() [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self.greenlet.switch() [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] result = function(*args, **kwargs) [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return func(*args, **kwargs) [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise e [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] nwinfo = self.network_api.allocate_for_instance( [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 580.976122] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] created_port_ids = self._update_ports_for_instance( [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] with excutils.save_and_reraise_exception(): [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.force_reraise() [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise self.value [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] updated_port = self._update_port( [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 580.976515] 
env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] _ensure_no_port_binding_failure(port) [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 580.976515] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise exception.PortBindingFailed(port_id=port['id']) [ 580.976882] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. [ 580.976882] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] [ 580.976882] env[61905]: INFO nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Terminating instance [ 580.977939] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.977939] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquired lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.977939] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 580.994187] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Releasing lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.994187] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 580.994187] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.994187] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8bbfb3a3-1c3b-4b94-83a1-ed9082bfd4f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.002315] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6b775d-d345-4bdd-bab5-cf5e4b60b2a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.031771] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 195ac5ee-8da4-41e9-8c1b-291ea09c80c6 could not be found. [ 581.032015] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.032200] env[61905]: INFO nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 581.032431] env[61905]: DEBUG oslo.service.loopingcall [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.032741] env[61905]: DEBUG nova.compute.manager [-] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 581.032741] env[61905]: DEBUG nova.network.neutron [-] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.089241] env[61905]: DEBUG nova.network.neutron [-] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.301273] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Successfully created port: 040dcde9-8647-4d14-8b86-fa3902e34d2a {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.351137] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112869} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.351137] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.351354] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 581.351530] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.367706] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f7ba2f-8c40-4d7f-9027-f46dac16504f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.379389] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ba6ad3-d55a-491a-a618-15add7d4d705 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.411939] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0851ede0-fbca-4418-9277-0c67517e5186 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.420557] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e931284d-be1d-45b9-9c5e-b8425fb64cc6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.435795] env[61905]: DEBUG nova.compute.provider_tree [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.519278] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 
tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.593783] env[61905]: DEBUG nova.network.neutron [-] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.639506] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Received event network-changed-c01e0a2a-db66-4e6a-b17f-43cba56f21b6 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 581.640129] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Refreshing instance network info cache due to event network-changed-c01e0a2a-db66-4e6a-b17f-43cba56f21b6. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 581.640462] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Acquiring lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.640744] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Acquired lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.641550] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Refreshing network info cache for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 581.770509] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.845364] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 581.878993] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.878993] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.878993] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.879262] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.879305] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.879991] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.879991] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.879991] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.880279] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.880279] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.880579] env[61905]: DEBUG nova.virt.hardware [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.881478] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2c4fe7-0d4c-4491-8ea2-fe3df9e96e5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.893028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc0c9e1-4abf-4246-ae22-253b0961ede3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.940582] env[61905]: DEBUG nova.scheduler.client.report [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.098020] env[61905]: INFO nova.compute.manager [-] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Took 1.06 seconds to deallocate network for instance. 
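The scheduler report record above spells out how this host's inventory is exposed to Placement for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7. The following is a minimal plain-Python sketch, not Nova code; the inventory dict is copied verbatim from that log record, and the capacity rule (total minus reserved, scaled by allocation_ratio, with max_unit capping a single allocation) is the conventional Placement-style reading of those fields:

# Minimal sketch (plain Python, not Nova code); inventory copied from the
# report-client record above. Schedulable capacity per resource class is
# (total - reserved) * allocation_ratio; max_unit bounds one allocation.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: %d schedulable units (max %d per allocation)'
          % (rc, capacity, inv['max_unit']))
# VCPU: 192 schedulable units (max 16 per allocation)
# MEMORY_MB: 196078 schedulable units (max 65530 per allocation)
# DISK_GB: 400 schedulable units (max 149 per allocation)

Read this way, the 4.0 VCPU allocation ratio is what lets the many concurrent tempest builds in this log claim CPUs against the 48 VCPUs of raw inventory without exhausting the provider.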
[ 582.098959] env[61905]: DEBUG nova.compute.claims [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 582.098959] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.173210] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.255160] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.255525] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.279079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Releasing lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.279529] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 582.279712] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 582.280769] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d5ee0f0-6ae4-4fc1-8ff9-5ca9427196eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.297577] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8533cf0-6e1e-47fa-a6c0-ffb0c912921f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.325255] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b788bb84-07b9-4407-9e6e-cac6510166b7 could not be found. [ 582.325703] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 582.328016] env[61905]: INFO nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 582.328016] env[61905]: DEBUG oslo.service.loopingcall [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.328016] env[61905]: DEBUG nova.compute.manager [-] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 582.328016] env[61905]: DEBUG nova.network.neutron [-] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 582.347907] env[61905]: DEBUG nova.network.neutron [-] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.376228] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.402101] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.402101] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.402101] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.402101] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.402898] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.402898] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.402898] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.402898] env[61905]: DEBUG nova.virt.hardware [None 
req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.402898] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.403173] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.403173] env[61905]: DEBUG nova.virt.hardware [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.403960] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d03f304-5014-480a-8689-b58a436fe902 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.417118] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0060cbe-4f7d-4568-92cf-7055588e545f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.433187] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 582.438875] env[61905]: DEBUG oslo.service.loopingcall [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.439147] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 582.439367] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52e09521-9927-4c99-92c2-d9c8a45ca186 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.452035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.452541] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 582.459273] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.489s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.459273] env[61905]: INFO nova.compute.claims [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.468810] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 582.468810] env[61905]: value = "task-1362285" [ 582.468810] env[61905]: _type = "Task" [ 582.468810] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.480699] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362285, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.763420] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 582.767024] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 582.767024] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 582.850428] env[61905]: DEBUG nova.network.neutron [-] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.882960] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Releasing lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.883263] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Received event network-vif-deleted-c01e0a2a-db66-4e6a-b17f-43cba56f21b6 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.883457] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Received event network-changed-6463419a-5ed8-4493-9d1c-150d40412efc {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.883646] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Refreshing instance network info cache due to event network-changed-6463419a-5ed8-4493-9d1c-150d40412efc. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 582.883887] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Acquiring lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.884054] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Acquired lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.884215] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Refreshing network info cache for port 6463419a-5ed8-4493-9d1c-150d40412efc {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 582.964370] env[61905]: DEBUG nova.compute.utils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.971314] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 582.971523] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.985172] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362285, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.073953] env[61905]: DEBUG nova.policy [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f5d62c68637455498751b44cd324829', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '427f61a90d444368978531f62cae9e05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 583.199311] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "0a1e2a21-a43d-4363-9f1f-683e35d199aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.199447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "0a1e2a21-a43d-4363-9f1f-683e35d199aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.270708] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.270843] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.270935] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.271070] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.271186] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Skipping network cache update for instance because it is Building.
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.271344] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.271419] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 583.271604] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.271767] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.271853] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 583.271983] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid 72770472-1b79-4408-b32c-34e56fd27c45 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 583.320262] env[61905]: ERROR nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. 
[ 583.320262] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.320262] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.320262] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.320262] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.320262] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.320262] env[61905]: ERROR nova.compute.manager raise self.value [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.320262] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.320262] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.320262] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.321136] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.321136] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.321136] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. 
[ 583.321136] env[61905]: ERROR nova.compute.manager [ 583.321136] env[61905]: Traceback (most recent call last): [ 583.321136] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.321136] env[61905]: listener.cb(fileno) [ 583.321136] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.321136] env[61905]: result = function(*args, **kwargs) [ 583.321136] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.321136] env[61905]: return func(*args, **kwargs) [ 583.321136] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.321136] env[61905]: raise e [ 583.321136] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.321136] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 583.321136] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.321136] env[61905]: created_port_ids = self._update_ports_for_instance( [ 583.321136] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.321136] env[61905]: with excutils.save_and_reraise_exception(): [ 583.321136] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.321136] env[61905]: self.force_reraise() [ 583.321136] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.321136] env[61905]: raise self.value [ 583.321136] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.321136] env[61905]: updated_port = self._update_port( [ 583.321136] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.321136] env[61905]: _ensure_no_port_binding_failure(port) [ 583.321136] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.321136] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.321948] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. [ 583.321948] env[61905]: Removing descriptor: 17 [ 583.321948] env[61905]: ERROR nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. 
[ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Traceback (most recent call last): [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] yield resources [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.driver.spawn(context, instance, image_meta, [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.321948] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] vm_ref = self.build_virtual_machine(instance, [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] for vif in network_info: [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self._sync_wrapper(fn, *args, **kwargs) [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.wait() [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self[:] = self._gt.wait() [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self._exit_event.wait() [ 583.322311] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.322661] env[61905]: ERROR 
nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] result = hub.switch() [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self.greenlet.switch() [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] result = function(*args, **kwargs) [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return func(*args, **kwargs) [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise e [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] nwinfo = self.network_api.allocate_for_instance( [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.322661] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] created_port_ids = self._update_ports_for_instance( [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] with excutils.save_and_reraise_exception(): [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.force_reraise() [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise self.value [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] updated_port = self._update_port( [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.323044] 
env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] _ensure_no_port_binding_failure(port) [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.323044] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise exception.PortBindingFailed(port_id=port['id']) [ 583.323389] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. [ 583.323389] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] [ 583.323389] env[61905]: INFO nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Terminating instance [ 583.324090] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquiring lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.324184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquired lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.324300] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.353393] env[61905]: INFO nova.compute.manager [-] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Took 1.03 seconds to deallocate network for instance. [ 583.355756] env[61905]: DEBUG nova.compute.claims [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 583.355756] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.414240] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.474297] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 583.493293] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362285, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.590603] env[61905]: DEBUG nova.network.neutron [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.654317] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Successfully created port: c5ee59c6-83ee-4c77-b728-0da1d37e65ac {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 583.807723] env[61905]: DEBUG nova.compute.manager [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Received event network-changed-040dcde9-8647-4d14-8b86-fa3902e34d2a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 583.807915] env[61905]: DEBUG nova.compute.manager [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Refreshing instance network info cache due to event network-changed-040dcde9-8647-4d14-8b86-fa3902e34d2a. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 583.808147] env[61905]: DEBUG oslo_concurrency.lockutils [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] Acquiring lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.861204] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.001153] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362285, 'name': CreateVM_Task, 'duration_secs': 1.344268} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.001153] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 584.001153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.001153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.001153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 584.001476] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-661c564f-2e44-44cc-a6e8-9d8d315e17b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.009901] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){ [ 584.009901] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527327a2-5c22-479b-0568-f6da9611b946" [ 584.009901] env[61905]: _type = "Task" [ 584.009901] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.022052] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527327a2-5c22-479b-0568-f6da9611b946, 'name': SearchDatastore_Task, 'duration_secs': 0.008905} completed successfully. 
[ 584.025463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 584.025705] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 584.025967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 584.026993] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 584.026993] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 584.027104] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05498122-7a58-47a0-915f-ba6cd0df2b65 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.040265] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 584.040265] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 584.042854] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-784c3572-a136-4ad7-88b5-cc83cb0143c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.050261] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){
[ 584.050261] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52018705-6fa5-8a68-8e82-99f379464e3a"
[ 584.050261] env[61905]: _type = "Task"
[ 584.050261] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 584.058511] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52018705-6fa5-8a68-8e82-99f379464e3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 584.098757] env[61905]: DEBUG oslo_concurrency.lockutils [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] Releasing lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 584.098888] env[61905]: DEBUG nova.compute.manager [req-cbe1bde2-0e32-4fa7-9357-2ae8892dcee2 req-769b3529-f198-43eb-b47e-1ab2a02504b6 service nova] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Received event network-vif-deleted-6463419a-5ed8-4493-9d1c-150d40412efc {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 584.106386] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652202ec-2319-48eb-aba6-91f11dc1980f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.115128] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc2afe7-fb8d-42a2-abbb-c2a213567883 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.155245] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15913044-c7db-4b8b-b9ae-833771ef0fbf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.166191] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4add83c8-3129-491a-ab12-18cee95c2752 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.173665] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 584.173665] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 584.185501] env[61905]: DEBUG nova.compute.provider_tree [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 584.225465] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 584.311890] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 584.495633] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 584.532553] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 584.533057] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 584.533273] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 584.533502] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 584.533847] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 584.534018] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 584.534229] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 584.534382] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 584.534542] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 584.534696] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 584.534867] env[61905]: DEBUG nova.virt.hardware [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
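With no flavor or image constraints (all limits and preferences are 0:0:0), the effective maximums default to 65536 and the only valid topology for a one-vCPU flavor is 1:1:1, as the records above conclude. A simplified illustration of that enumeration (not Nova's actual implementation): every (sockets, cores, threads) triple whose product equals the vCPU count, within the maximums, is a candidate.

    # Simplified sketch of the search logged above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log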
[ 584.535761] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccab7cff-15d3-45fa-a930-4360a614cd41 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.545525] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f6ac1a-337f-4461-93e0-cfa4e00dae6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.558985] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52018705-6fa5-8a68-8e82-99f379464e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 584.568224] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9197401-8d5d-4943-b083-eebca9e85122 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.574193] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){
[ 584.574193] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52de28e2-0a03-c068-d698-9b30c788183d"
[ 584.574193] env[61905]: _type = "Task"
[ 584.574193] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 584.583522] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52de28e2-0a03-c068-d698-9b30c788183d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 584.692411] env[61905]: DEBUG nova.scheduler.client.report [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
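Placement derives schedulable capacity for each resource class in the inventory above as (total - reserved) * allocation_ratio, so this provider advertises 192 VCPU, 196078 MB of RAM, and 400 GB of disk. A quick check of those numbers:

    # The relevant fields from the inventory record above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0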
[ 584.728292] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Releasing lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 584.731110] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 584.731110] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 584.731110] env[61905]: DEBUG oslo_concurrency.lockutils [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] Acquired lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 584.731110] env[61905]: DEBUG nova.network.neutron [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Refreshing network info cache for port 040dcde9-8647-4d14-8b86-fa3902e34d2a {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 584.731110] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-079531cd-7f09-4ddc-aef1-a6d417de2a51 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.745263] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee6891f-5949-4d68-90f1-152e24444d7a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 584.774838] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88c496a6-8007-4111-8ac1-6e0f8680ef24 could not be found.
[ 584.775404] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 584.775637] env[61905]: INFO nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 584.775988] env[61905]: DEBUG oslo.service.loopingcall [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 584.776875] env[61905]: DEBUG nova.compute.manager [-] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 584.777135] env[61905]: DEBUG nova.network.neutron [-] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 584.798939] env[61905]: DEBUG nova.network.neutron [-] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 584.910278] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 585.080859] env[61905]: ERROR nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information.
[ 585.080859] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 585.080859] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 585.080859] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 585.080859] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 585.080859] env[61905]: ERROR nova.compute.manager self.force_reraise()
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 585.080859] env[61905]: ERROR nova.compute.manager raise self.value
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 585.080859] env[61905]: ERROR nova.compute.manager updated_port = self._update_port(
[ 585.080859] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 585.080859] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 585.081253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 585.081253] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 585.081253] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information.
[ 585.081253] env[61905]: ERROR nova.compute.manager
[ 585.081253] env[61905]: Traceback (most recent call last):
[ 585.081253] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 585.081253] env[61905]: listener.cb(fileno)
[ 585.081253] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 585.081253] env[61905]: result = function(*args, **kwargs)
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 585.081253] env[61905]: return func(*args, **kwargs)
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 585.081253] env[61905]: raise e
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 585.081253] env[61905]: nwinfo = self.network_api.allocate_for_instance(
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 585.081253] env[61905]: created_port_ids = self._update_ports_for_instance(
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 585.081253] env[61905]: with excutils.save_and_reraise_exception():
[ 585.081253] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 585.081253] env[61905]: self.force_reraise()
[ 585.081253] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 585.081253] env[61905]: raise self.value
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 585.081253] env[61905]: updated_port = self._update_port(
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 585.081253] env[61905]: _ensure_no_port_binding_failure(port)
[ 585.081253] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 585.081253] env[61905]: raise exception.PortBindingFailed(port_id=port['id'])
[ 585.082040] env[61905]: nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information.
[ 585.082040] env[61905]: Removing descriptor: 18
[ 585.082040] env[61905]: ERROR nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information.
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Traceback (most recent call last):
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] yield resources
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.driver.spawn(context, instance, image_meta,
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 585.082040] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] vm_ref = self.build_virtual_machine(instance,
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] vif_infos = vmwarevif.get_vif_info(self._session,
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] for vif in network_info:
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self._sync_wrapper(fn, *args, **kwargs)
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.wait()
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self[:] = self._gt.wait()
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self._exit_event.wait()
[ 585.082342] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] result = hub.switch()
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self.greenlet.switch()
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] result = function(*args, **kwargs)
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return func(*args, **kwargs)
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise e
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] nwinfo = self.network_api.allocate_for_instance(
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 585.082619] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] created_port_ids = self._update_ports_for_instance(
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] with excutils.save_and_reraise_exception():
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.force_reraise()
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise self.value
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] updated_port = self._update_port(
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] _ensure_no_port_binding_failure(port)
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 585.082897] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise exception.PortBindingFailed(port_id=port['id'])
[ 585.083166] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information.
[ 585.083166] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e]
[ 585.083166] env[61905]: INFO nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Terminating instance
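Every frame in the tracebacks above funnels into _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in the traceback), which turns a failed Neutron binding into the PortBindingFailed that aborts the spawn. A minimal sketch of that check, assuming the usual Neutron convention that a failed binding surfaces as binding:vif_type = 'binding_failed':

    from nova import exception

    def ensure_no_port_binding_failure(port):
        # A port that no mechanism driver could bind comes back from
        # Neutron with the sentinel vif_type below; raising here stops
        # the driver from plugging an unbound VIF.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])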
[ 585.086957] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52de28e2-0a03-c068-d698-9b30c788183d, 'name': SearchDatastore_Task, 'duration_secs': 0.01008} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 585.087484] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 585.087634] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquired lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 585.087792] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 585.088833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 585.089089] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 585.089336] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddfd21a3-fbdf-442e-8b5d-29940739e953 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.097774] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){
[ 585.097774] env[61905]: value = "task-1362287"
[ 585.097774] env[61905]: _type = "Task"
[ 585.097774] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 585.110647] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 585.198027] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 585.198027] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 585.199382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.445s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 585.258208] env[61905]: DEBUG nova.network.neutron [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 585.301425] env[61905]: DEBUG nova.network.neutron [-] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 585.361956] env[61905]: DEBUG nova.network.neutron [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 585.412871] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 585.413105] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}}
[ 585.413323] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.413476] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.413757] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.413757] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.413884] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.414033] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.415166] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
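The block of "Running periodic task" records is oslo.service's periodic task runner walking every decorated method on the compute manager; _reclaim_queued_deletes bails out immediately because reclaim_instance_interval is unset. A sketch of how such tasks are declared (the class and interval here are illustrative):

    from oslo_service import periodic_task

    class SketchManager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_rebooting_instances(self, context):
            # The runner calls this roughly every `spacing` seconds,
            # emitting the "Running periodic task ..." DEBUG line first.
            pass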
[ 585.415166] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 585.611469] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487516} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 585.613824] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 585.614702] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 585.615074] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb876cfc-2fb1-4163-aeec-8bba7eb634cb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 585.617595] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 585.629691] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){
[ 585.629691] env[61905]: value = "task-1362288"
[ 585.629691] env[61905]: _type = "Task"
[ 585.629691] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 585.639392] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
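Tasks 1362287 and 1362288 are the two VirtualDiskManager calls of the backing-file flow: copy the cached VMDK into the instance directory, then extend it to the flavor's root disk (1048576 KB = 1 GiB, matching m1.nano's root_gb=1). A sketch of that sequence with oslo.vmware; the parameter names follow the vSphere API, and all values are placeholders:

    def copy_then_extend(session, dc_ref, src_vmdk, dst_vmdk, size_kb):
        vdm = session.vim.service_content.virtualDiskManager
        # CopyVirtualDisk_Task clones the cached image disk ...
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src_vmdk, sourceDatacenter=dc_ref,
                                  destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)
        # ... and ExtendVirtualDisk_Task grows it to the flavor's root_gb.
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                  name=dst_vmdk, datacenter=dc_ref,
                                  newCapacityKb=size_kb, eagerZero=False)
        session.wait_for_task(task)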
[ 585.706205] env[61905]: DEBUG nova.compute.utils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 585.711959] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 585.717705] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 585.813366] env[61905]: INFO nova.compute.manager [-] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Took 1.04 seconds to deallocate network for instance.
[ 585.819433] env[61905]: DEBUG nova.compute.claims [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 585.819433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 585.836526] env[61905]: DEBUG nova.compute.manager [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Received event network-changed-c5ee59c6-83ee-4c77-b728-0da1d37e65ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 585.837397] env[61905]: DEBUG nova.compute.manager [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Refreshing instance network info cache due to event network-changed-c5ee59c6-83ee-4c77-b728-0da1d37e65ac. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 585.837397] env[61905]: DEBUG oslo_concurrency.lockutils [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] Acquiring lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 585.865166] env[61905]: DEBUG oslo_concurrency.lockutils [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] Releasing lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 585.865345] env[61905]: DEBUG nova.compute.manager [req-cca0e68c-2eef-4795-ada7-25189044c0d8 req-767d2b05-8d84-48ee-8481-804f236705da service nova] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Received event network-vif-deleted-040dcde9-8647-4d14-8b86-fa3902e34d2a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 585.918552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 586.144857] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073635} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 586.145185] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 586.146728] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bde50b-6f3c-41ad-ae7e-46dfdddfcdca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.168868] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 586.172149] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05067770-2eac-42f4-b7f6-20f90b68afc1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.193064] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){
[ 586.193064] env[61905]: value = "task-1362290"
[ 586.193064] env[61905]: _type = "Task"
[ 586.193064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 586.201113] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c12155-c26b-41be-bb34-fe90a2bca060 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.211942] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 586.214480] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 586.217407] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b86f27-d22c-4dc6-830a-67573ce93342 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.220646] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Releasing lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 586.221016] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 586.221211] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 586.221499] env[61905]: DEBUG oslo_concurrency.lockutils [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] Acquired lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 586.221791] env[61905]: DEBUG nova.network.neutron [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Refreshing network info cache for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 586.222737] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28dedbc8-2a54-4110-a210-8abb6a203170 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.259396] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47abc80-6c12-4eee-bc25-3f2f64e7f1b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.271943] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b116429-b09f-4d74-8975-2f212dc40a33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 586.288424] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e could not be found.
[ 586.288677] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 586.288857] env[61905]: INFO nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Took 0.07 seconds to destroy the instance on the hypervisor.
[ 586.289110] env[61905]: DEBUG oslo.service.loopingcall [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
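The "Waiting for function ... _deallocate_network_with_retries to return" records come from oslo.service's loopingcall module, which re-runs the deallocation until it succeeds. The general shape of that retry loop (illustrative, not Nova's exact wrapper):

    from oslo_service import loopingcall

    def deallocate_with_retries(do_deallocate):
        def _try():
            try:
                do_deallocate()
            except Exception:
                return  # leave the loop running; retry on the next tick
            raise loopingcall.LoopingCallDone()

        # FixedIntervalLoopingCall re-invokes _try every `interval` seconds
        # until LoopingCallDone is raised; .wait() blocks the caller.
        loopingcall.FixedIntervalLoopingCall(_try).start(interval=2).wait()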
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.290983] env[61905]: DEBUG nova.compute.manager [-] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.291105] env[61905]: DEBUG nova.network.neutron [-] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.293938] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df87e2d7-3583-4367-8f39-17c5d12cee6c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.308166] env[61905]: DEBUG nova.compute.provider_tree [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.317320] env[61905]: DEBUG nova.network.neutron [-] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.706270] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362290, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.748896] env[61905]: DEBUG nova.network.neutron [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.813774] env[61905]: DEBUG nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.820454] env[61905]: DEBUG nova.network.neutron [-] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.912855] env[61905]: DEBUG nova.network.neutron [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.208210] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362290, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.222491] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.252976] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.253205] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.253362] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.253539] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.253680] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.253823] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.254655] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.254917] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.255170]
env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.255384] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.255593] env[61905]: DEBUG nova.virt.hardware [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.256508] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5879e0-0be8-49bf-8dc6-22523da73efa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.265092] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad92d7d1-efd7-407c-bd07-cf7ca91a07a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.281696] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.287388] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Creating folder: Project (fad554f9959342e4b86491c199fd03f8). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.287681] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ff03955-e415-4f17-87a0-a8e69b67e0e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.300489] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Created folder: Project (fad554f9959342e4b86491c199fd03f8) in parent group-v289968. [ 587.300683] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Creating folder: Instances. Parent ref: group-v289981. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.300983] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7821e293-afd8-4ec3-a9fe-b064852e6650 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.312429] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Created folder: Instances in parent group-v289981. [ 587.312429] env[61905]: DEBUG oslo.service.loopingcall [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.312429] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.312429] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55287930-b75a-431b-a56c-008429127b17 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.329989] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.130s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.329989] env[61905]: ERROR nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. 
[ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Traceback (most recent call last): [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.driver.spawn(context, instance, image_meta, [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self._vmops.spawn(context, instance, image_meta, injected_files, [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 587.329989] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] vm_ref = self.build_virtual_machine(instance, [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] vif_infos = vmwarevif.get_vif_info(self._session, [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] for vif in network_info: [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self._sync_wrapper(fn, *args, **kwargs) [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.wait() [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self[:] = self._gt.wait() [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self._exit_event.wait() [ 587.330356] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] result = hub.switch() [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return self.greenlet.switch() [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] result = function(*args, **kwargs) [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] return func(*args, **kwargs) [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise e [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] nwinfo = self.network_api.allocate_for_instance( [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 587.330620] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] created_port_ids = self._update_ports_for_instance( [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] with excutils.save_and_reraise_exception(): [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] self.force_reraise() [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise self.value [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] updated_port = self._update_port( [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] _ensure_no_port_binding_failure(port) [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 587.330898] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] raise exception.PortBindingFailed(port_id=port['id']) [ 587.331170] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] nova.exception.PortBindingFailed: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. [ 587.331170] env[61905]: ERROR nova.compute.manager [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] [ 587.331170] env[61905]: DEBUG nova.compute.utils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 587.336032] env[61905]: INFO nova.compute.manager [-] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Took 1.04 seconds to deallocate network for instance. [ 587.336032] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.121s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.336032] env[61905]: INFO nova.compute.claims [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.341702] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Build of instance 70c8f43f-28f0-4097-a8cb-37f6654ec014 was re-scheduled: Binding failed for port d630a755-6b72-4301-b52f-164a6e89ee82, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 587.342196] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 587.342460] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquiring lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.342611] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Acquired lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.346018] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 587.347208] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.347208] env[61905]: value = "task-1362293" [ 587.347208] env[61905]: _type = "Task" [ 587.347208] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.357230] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362293, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.365306] env[61905]: DEBUG nova.compute.claims [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.365490] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.419471] env[61905]: DEBUG oslo_concurrency.lockutils [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] Releasing lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.419773] env[61905]: DEBUG nova.compute.manager [req-6215abcb-576a-4cf2-8bfa-d91221379232 req-ee6fc3b1-a219-4d9e-8622-a5dd3d7fa07e service nova] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Received event network-vif-deleted-c5ee59c6-83ee-4c77-b728-0da1d37e65ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 587.707587] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362290, 'name': ReconfigVM_Task, 'duration_secs': 1.360442} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.708327] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45/72770472-1b79-4408-b32c-34e56fd27c45.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 587.708670] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c71518c-43d8-42bc-88d7-7ef2124c7129 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.716618] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){ [ 587.716618] env[61905]: value = "task-1362294" [ 587.716618] env[61905]: _type = "Task" [ 587.716618] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.725346] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362294, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.861802] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362293, 'name': CreateVM_Task, 'duration_secs': 0.324648} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.862017] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 587.862535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.862785] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.863468] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 587.863468] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af71729-6531-46fe-9a15-8d5be4df10bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.868977] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 587.868977] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a527c6-9685-e618-a102-8932ceda1f03" [ 587.868977] env[61905]: _type = "Task" [ 587.868977] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.873341] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.882038] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a527c6-9685-e618-a102-8932ceda1f03, 'name': SearchDatastore_Task, 'duration_secs': 0.009578} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.882324] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.882542] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 587.882801] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.882950] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.883143] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.883396] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1110900-aaaa-460c-b316-95a4d2bdab4b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.891956] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.892074] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 587.892753] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386fd4b6-a951-4b78-acd1-9e9d03e083fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.900726] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 587.900726] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525e1861-1eb7-61bc-987f-3f667081c880" [ 587.900726] env[61905]: _type = "Task" [ 587.900726] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.908629] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525e1861-1eb7-61bc-987f-3f667081c880, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.952995] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.227480] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362294, 'name': Rename_Task, 'duration_secs': 0.146831} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.227790] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 588.228020] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-094442dc-70a5-481d-bb4d-7f44be4047cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.235810] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Waiting for the task: (returnval){ [ 588.235810] env[61905]: value = "task-1362296" [ 588.235810] env[61905]: _type = "Task" [ 588.235810] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.242714] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362296, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.413269] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525e1861-1eb7-61bc-987f-3f667081c880, 'name': SearchDatastore_Task, 'duration_secs': 0.009138} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.416679] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef9258f-cad6-41c1-9e00-21503e71381a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.422884] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 588.422884] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52539430-76df-d407-7ed0-1bfd5a19756f" [ 588.422884] env[61905]: _type = "Task" [ 588.422884] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.432612] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52539430-76df-d407-7ed0-1bfd5a19756f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.461456] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Releasing lock "refresh_cache-70c8f43f-28f0-4097-a8cb-37f6654ec014" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.461456] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 588.461619] env[61905]: DEBUG nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.461756] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 588.480962] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 588.715035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquiring lock "ef6e5c2c-1778-4079-ae35-55f9264a060d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.715035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "ef6e5c2c-1778-4079-ae35-55f9264a060d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.747801] env[61905]: DEBUG oslo_vmware.api [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Task: {'id': task-1362296, 'name': PowerOnVM_Task, 'duration_secs': 0.469169} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.747801] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 588.747801] env[61905]: DEBUG nova.compute.manager [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 588.748709] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270d497b-6ec6-4ddb-81af-3be47d4ff7f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.842449] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed8d14d-70bb-4699-afc5-9ef1098eb4b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.850891] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3832e805-4ceb-4e7d-9503-4edad9e2639d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.881105] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9271ce4-6ebb-4105-8e19-153fa9bda121 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.889186] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5074e0b8-5d99-468d-8c7b-35c11ad05df4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.904699] env[61905]: DEBUG nova.compute.provider_tree [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.937022] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52539430-76df-d407-7ed0-1bfd5a19756f, 'name': SearchDatastore_Task, 'duration_secs': 0.009742} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.937022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.937022] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d/2a8bcc04-5519-4890-839b-64dcf422526d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 588.937022] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d7b03dc-587b-4f26-82a3-fb8a1b935077 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.942780] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 588.942780] env[61905]: value = "task-1362297" [ 588.942780] env[61905]: _type = "Task" [ 588.942780] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.951863] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362297, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.984065] env[61905]: DEBUG nova.network.neutron [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.272094] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.410898] env[61905]: DEBUG nova.scheduler.client.report [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.455711] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362297, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.486569] env[61905]: INFO nova.compute.manager [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] [instance: 70c8f43f-28f0-4097-a8cb-37f6654ec014] Took 1.02 seconds to deallocate network for instance. [ 589.915895] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.916829] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 589.919434] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.535s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.920754] env[61905]: INFO nova.compute.claims [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.955062] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362297, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696074} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.956048] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d/2a8bcc04-5519-4890-839b-64dcf422526d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 589.956276] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 589.956522] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0725d015-a4c0-4363-9361-a1e1b14e5677 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.964282] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 589.964282] env[61905]: value = "task-1362298" [ 589.964282] env[61905]: _type = "Task" [ 589.964282] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.974908] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362298, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.253827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "72770472-1b79-4408-b32c-34e56fd27c45" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.253827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.253827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "72770472-1b79-4408-b32c-34e56fd27c45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.253827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.253995] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.255962] env[61905]: INFO nova.compute.manager [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Terminating instance [ 590.257560] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.258476] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquired lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.258476] env[61905]: DEBUG nova.network.neutron [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344
tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.425469] env[61905]: DEBUG nova.compute.utils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.430092] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 590.430092] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.474260] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080108} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.474525] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 590.475724] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b893ca71-e95e-41bc-83c1-4de2d6d95c49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.496532] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d/2a8bcc04-5519-4890-839b-64dcf422526d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 590.498064] env[61905]: DEBUG nova.policy [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 590.501578] 
env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff9078d9-6348-450e-9074-aaef268bbbe4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.522293] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 590.522293] env[61905]: value = "task-1362300" [ 590.522293] env[61905]: _type = "Task" [ 590.522293] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.532964] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.537928] env[61905]: INFO nova.scheduler.client.report [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Deleted allocations for instance 70c8f43f-28f0-4097-a8cb-37f6654ec014 [ 590.789613] env[61905]: DEBUG nova.network.neutron [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.845291] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Successfully created port: 2d69d1c6-de7b-44a1-943a-5aec1d543aa3 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.889769] env[61905]: DEBUG nova.network.neutron [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.930019] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 591.034550] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362300, 'name': ReconfigVM_Task, 'duration_secs': 0.318979} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.034866] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d/2a8bcc04-5519-4890-839b-64dcf422526d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 591.035489] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e32c6cc3-d23d-4950-9c33-a1240632076f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.045599] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8d208232-1f88-4aa1-b20d-3d8553d3ee74 tempest-ListImageFiltersTestJSON-260375986 tempest-ListImageFiltersTestJSON-260375986-project-member] Lock "70c8f43f-28f0-4097-a8cb-37f6654ec014" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.117s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.049257] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 591.049257] env[61905]: value = "task-1362301" [ 591.049257] env[61905]: _type = "Task" [ 591.049257] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.065412] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362301, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.391711] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea7a790-f8e1-4d1a-8026-a40efe0de368 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.394813] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Releasing lock "refresh_cache-72770472-1b79-4408-b32c-34e56fd27c45" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.395238] env[61905]: DEBUG nova.compute.manager [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 591.395438] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 591.396268] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdcd7e6-5c0d-47cf-9ef3-9b5e2f064571 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.406481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525b1bb0-f520-4e83-92ac-6172d49bee5e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.410072] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 591.410348] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dedfe11c-7cbf-4bbe-b6f0-f42e157af2a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.453180] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2aabd9-2b19-46e3-9951-46541664add3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.455370] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 591.455370] env[61905]: value = "task-1362302" [ 591.455370] env[61905]: _type = "Task" [ 591.455370] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.465734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f4a1f7-d668-45fb-9317-55d34f95b583 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.470721] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.483608] env[61905]: DEBUG nova.compute.provider_tree [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.550676] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 591.564750] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362301, 'name': Rename_Task, 'duration_secs': 0.151633} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.565035] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 591.565293] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04213267-a567-4847-af76-955cf5620b80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.572761] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 591.572761] env[61905]: value = "task-1362303" [ 591.572761] env[61905]: _type = "Task" [ 591.572761] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.581468] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.826754] env[61905]: DEBUG nova.compute.manager [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Received event network-changed-2d69d1c6-de7b-44a1-943a-5aec1d543aa3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 591.826969] env[61905]: DEBUG nova.compute.manager [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Refreshing instance network info cache due to event network-changed-2d69d1c6-de7b-44a1-943a-5aec1d543aa3. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 591.827394] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] Acquiring lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.827538] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] Acquired lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.827775] env[61905]: DEBUG nova.network.neutron [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Refreshing network info cache for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 591.957848] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 591.970373] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362302, 'name': PowerOffVM_Task, 'duration_secs': 0.12497} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.970696] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 591.970754] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 591.971346] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d8108c3-18ee-4ddb-806a-074f291126be {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.986889] env[61905]: DEBUG nova.scheduler.client.report [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 592.002972] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 592.003051] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 592.004736] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleting the datastore file [datastore2] 72770472-1b79-4408-b32c-34e56fd27c45 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.004736] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c9cc438-9042-486d-b677-f6e009538701 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.011472] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.011756] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.012295] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.012544] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.012722] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 
0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.012896] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.013270] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.013372] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.013614] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.013833] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.014058] env[61905]: DEBUG nova.virt.hardware [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.015244] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab6f961-63a2-4409-a7ca-1472fd56b36e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.021335] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for the task: (returnval){ [ 592.021335] env[61905]: value = "task-1362305" [ 592.021335] env[61905]: _type = "Task" [ 592.021335] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.030146] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e99503-e667-4236-b85e-7a32690e0c1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.034613] env[61905]: ERROR nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. 
[ 592.034613] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.034613] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.034613] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.034613] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.034613] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.034613] env[61905]: ERROR nova.compute.manager raise self.value [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.034613] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 592.034613] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.034613] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 592.035039] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.035039] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 592.035039] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. 
[ 592.035039] env[61905]: ERROR nova.compute.manager [ 592.035039] env[61905]: Traceback (most recent call last): [ 592.035039] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 592.035039] env[61905]: listener.cb(fileno) [ 592.035039] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.035039] env[61905]: result = function(*args, **kwargs) [ 592.035039] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.035039] env[61905]: return func(*args, **kwargs) [ 592.035039] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.035039] env[61905]: raise e [ 592.035039] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.035039] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 592.035039] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.035039] env[61905]: created_port_ids = self._update_ports_for_instance( [ 592.035039] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.035039] env[61905]: with excutils.save_and_reraise_exception(): [ 592.035039] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.035039] env[61905]: self.force_reraise() [ 592.035039] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.035039] env[61905]: raise self.value [ 592.035039] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.035039] env[61905]: updated_port = self._update_port( [ 592.035039] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.035039] env[61905]: _ensure_no_port_binding_failure(port) [ 592.035039] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 592.035039] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 592.035713] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. [ 592.035713] env[61905]: Removing descriptor: 18 [ 592.038572] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.050195] env[61905]: ERROR nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. 
[ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Traceback (most recent call last): [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] yield resources [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.driver.spawn(context, instance, image_meta, [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] vm_ref = self.build_virtual_machine(instance, [ 592.050195] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] for vif in network_info: [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return self._sync_wrapper(fn, *args, **kwargs) [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.wait() [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self[:] = self._gt.wait() [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return self._exit_event.wait() [ 592.050666] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 592.050666] env[61905]: ERROR 
nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] current.throw(*self._exc) [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] result = function(*args, **kwargs) [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return func(*args, **kwargs) [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise e [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] nwinfo = self.network_api.allocate_for_instance( [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] created_port_ids = self._update_ports_for_instance( [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] with excutils.save_and_reraise_exception(): [ 592.051078] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.force_reraise() [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise self.value [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] updated_port = self._update_port( [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] _ensure_no_port_binding_failure(port) [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise exception.PortBindingFailed(port_id=port['id']) [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. [ 592.051353] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] [ 592.051353] env[61905]: INFO nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Terminating instance [ 592.053492] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.075131] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.084300] env[61905]: DEBUG oslo_vmware.api [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362303, 'name': PowerOnVM_Task, 'duration_secs': 0.467345} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.084556] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 592.084750] env[61905]: INFO nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Took 4.86 seconds to spawn the instance on the hypervisor. [ 592.084921] env[61905]: DEBUG nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 592.085690] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9571f39d-aff8-40bb-9093-09c8af74098c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.350878] env[61905]: DEBUG nova.network.neutron [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.363446] env[61905]: DEBUG nova.compute.manager [None req-8e6c38ca-0468-4497-9c96-93c07025bc5c tempest-ServerDiagnosticsV248Test-1901360014 tempest-ServerDiagnosticsV248Test-1901360014-project-admin] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 592.365097] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8ea5f9-e0a2-443d-962e-bf040cdb4aa1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.380263] env[61905]: INFO nova.compute.manager [None req-8e6c38ca-0468-4497-9c96-93c07025bc5c tempest-ServerDiagnosticsV248Test-1901360014 tempest-ServerDiagnosticsV248Test-1901360014-project-admin] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Retrieving diagnostics [ 592.380263] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39143ee4-60df-40d5-b65b-63f2b8837409 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.473443] env[61905]: DEBUG nova.network.neutron [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.501020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.501124] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 592.503799] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.041s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.505656] env[61905]: INFO nova.compute.claims [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.534088] env[61905]: DEBUG oslo_vmware.api [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Task: {'id': task-1362305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217283} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.534316] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 592.534495] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 592.534667] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 592.534819] env[61905]: INFO nova.compute.manager [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Took 1.14 seconds to destroy the instance on the hypervisor. [ 592.535085] env[61905]: DEBUG oslo.service.loopingcall [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.535283] env[61905]: DEBUG nova.compute.manager [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 592.535394] env[61905]: DEBUG nova.network.neutron [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 592.552561] env[61905]: DEBUG nova.network.neutron [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.607865] env[61905]: INFO nova.compute.manager [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Took 25.67 seconds to build instance. 
[ 592.977342] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c8d4118-30fb-48b6-892b-304477b3c3b2 req-15346eb3-6ead-428a-8391-9108167c5171 service nova] Releasing lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.979527] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.979772] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.010286] env[61905]: DEBUG nova.compute.utils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.013970] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 593.014163] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.054913] env[61905]: DEBUG nova.network.neutron [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.076195] env[61905]: DEBUG nova.policy [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bed596e2d5994dc58ad3e291ac87adb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce758a283b4c4e5093fe2e5afcc6ab71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 593.109884] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9029c9a5-1890-48bd-9f4c-25a0e35c0c95 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.634s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
593.502385] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.514451] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 593.557635] env[61905]: INFO nova.compute.manager [-] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Took 1.02 seconds to deallocate network for instance. [ 593.612164] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 593.636274] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Successfully created port: 1ccbfe16-12b2-430b-a831-824fc133a512 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.664258] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.863788] env[61905]: DEBUG nova.compute.manager [req-6cefef8e-afad-4b47-84a3-c8a077c448b0 req-e2d7d7b5-8804-4d3d-b849-f8680ae45a48 service nova] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Received event network-vif-deleted-2d69d1c6-de7b-44a1-943a-5aec1d543aa3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 593.989738] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c46a37-c5e4-49ce-8d1f-d02f0a70e1bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.003507] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50c0e5b-ef4a-473e-b4ed-050a46bd394a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.049816] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e5911a-079b-4dc7-8f37-f9585c1eff6e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.057101] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f7ff6-809d-4202-aac5-4771b52c0dc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.075582] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.076399] env[61905]: DEBUG nova.compute.provider_tree [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.136363] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.166625] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.171145] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 594.174070] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.174070] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0852814-4f0c-48fc-b7d4-a01af23f266d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.181434] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8132c9-43b5-4438-bf7f-8bf0a70c52a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.207784] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 56997e40-ec60-422d-b58c-8a628d37b1bc could not be found. 
[ 594.208011] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.208277] env[61905]: INFO nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 594.208523] env[61905]: DEBUG oslo.service.loopingcall [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.208739] env[61905]: DEBUG nova.compute.manager [-] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.208829] env[61905]: DEBUG nova.network.neutron [-] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 594.226198] env[61905]: DEBUG nova.network.neutron [-] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.547883] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Start spawning the instance on the hypervisor.
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 594.579864] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.580194] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.580267] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.580445] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.580583] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.580723] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.580924] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.581096] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.581262] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536
tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.581411] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.581577] env[61905]: DEBUG nova.virt.hardware [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.582426] env[61905]: DEBUG nova.scheduler.client.report [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 594.586239] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789a831f-b6ae-4ccf-9c05-6ed27cf6a5ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.595248] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d9bb18-95c6-422d-9dc0-918e3c64270b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.728598] env[61905]: DEBUG nova.network.neutron [-] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.816480] env[61905]: ERROR nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. 
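The traceback that follows bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in this tree). The check behind that frame is small: if neutron reports the port's binding:vif_type as binding_failed, nova raises PortBindingFailed. A paraphrased sketch, not copied from the tree above:

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # neutron's sentinel value

    def ensure_no_port_binding_failure(port):
        # Neutron sets binding:vif_type to 'binding_failed' when no
        # mechanism driver could bind the port on the target host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])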
[ 594.816480] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.816480] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.816480] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.816480] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.816480] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.816480] env[61905]: ERROR nova.compute.manager raise self.value [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.816480] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 594.816480] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.816480] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 594.817374] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.817374] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 594.817374] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. 
[ 594.817374] env[61905]: ERROR nova.compute.manager [ 594.817374] env[61905]: Traceback (most recent call last): [ 594.817374] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 594.817374] env[61905]: listener.cb(fileno) [ 594.817374] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.817374] env[61905]: result = function(*args, **kwargs) [ 594.817374] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.817374] env[61905]: return func(*args, **kwargs) [ 594.817374] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.817374] env[61905]: raise e [ 594.817374] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.817374] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 594.817374] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.817374] env[61905]: created_port_ids = self._update_ports_for_instance( [ 594.817374] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.817374] env[61905]: with excutils.save_and_reraise_exception(): [ 594.817374] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.817374] env[61905]: self.force_reraise() [ 594.817374] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.817374] env[61905]: raise self.value [ 594.817374] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.817374] env[61905]: updated_port = self._update_port( [ 594.817374] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.817374] env[61905]: _ensure_no_port_binding_failure(port) [ 594.817374] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.817374] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 594.818735] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. [ 594.818735] env[61905]: Removing descriptor: 18 [ 594.818735] env[61905]: ERROR nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. 
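Both tracebacks above pass through oslo_utils.excutils.save_and_reraise_exception, which is why force_reraise() and "raise self.value" appear in the frames: the context manager captures the exception raised by _update_port and re-raises it when the with-block exits, after any cleanup has had a chance to run. A runnable sketch of the pattern, with _update_port reduced to a stand-in that simply fails:

    from oslo_utils import excutils

    def _update_port(port_id):
        # Stand-in for the neutron port update that failed above.
        raise RuntimeError('binding failed for %s' % port_id)

    def update_ports(port_ids):
        for port_id in port_ids:
            with excutils.save_and_reraise_exception():
                # The exception is saved and re-raised on block exit;
                # binding 'as ctxt' and setting ctxt.reraise = False
                # would suppress it instead.
                _update_port(port_id)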
[ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Traceback (most recent call last): [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] yield resources [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.driver.spawn(context, instance, image_meta, [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 594.818735] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] vm_ref = self.build_virtual_machine(instance, [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] vif_infos = vmwarevif.get_vif_info(self._session, [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] for vif in network_info: [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self._sync_wrapper(fn, *args, **kwargs) [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.wait() [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self[:] = self._gt.wait() [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self._exit_event.wait() [ 594.819021] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 594.819317] env[61905]: ERROR 
nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] result = hub.switch() [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self.greenlet.switch() [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] result = function(*args, **kwargs) [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return func(*args, **kwargs) [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise e [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] nwinfo = self.network_api.allocate_for_instance( [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 594.819317] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] created_port_ids = self._update_ports_for_instance( [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] with excutils.save_and_reraise_exception(): [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.force_reraise() [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise self.value [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] updated_port = self._update_port( [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 594.819585] 
env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] _ensure_no_port_binding_failure(port) [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 594.819585] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise exception.PortBindingFailed(port_id=port['id']) [ 594.819909] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. [ 594.819909] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] [ 594.819909] env[61905]: INFO nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Terminating instance [ 594.821229] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquiring lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.821595] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquired lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.821840] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 595.090703] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.092807] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 595.095926] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.645s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.096189] env[61905]: DEBUG nova.objects.instance [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 595.231469] env[61905]: INFO nova.compute.manager [-] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Took 1.02 seconds to deallocate network for instance. [ 595.234444] env[61905]: DEBUG nova.compute.claims [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 595.234444] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.347097] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.435015] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.600278] env[61905]: DEBUG nova.compute.utils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.604464] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 595.605349] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 595.658376] env[61905]: DEBUG nova.policy [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f40d6d6fd374886a0f29824f37c2328', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a890d180b4d14864955629f63ec18850', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 595.938117] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Releasing lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.938314] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 595.938489] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 595.938795] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6101863d-de38-402a-86b1-ff5a994731c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.951873] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f73783b-1d45-458b-b63a-a61fd0ae1b54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.966436] env[61905]: DEBUG nova.compute.manager [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Received event network-changed-1ccbfe16-12b2-430b-a831-824fc133a512 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.966626] env[61905]: DEBUG nova.compute.manager [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Refreshing instance network info cache due to event network-changed-1ccbfe16-12b2-430b-a831-824fc133a512. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 595.966864] env[61905]: DEBUG oslo_concurrency.lockutils [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] Acquiring lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.967013] env[61905]: DEBUG oslo_concurrency.lockutils [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] Acquired lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.967177] env[61905]: DEBUG nova.network.neutron [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Refreshing network info cache for port 1ccbfe16-12b2-430b-a831-824fc133a512 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.984119] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 79c34bad-f29b-4d5e-97d5-6bfd6be55b31 could not be found. [ 595.984366] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 595.984544] env[61905]: INFO nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Took 0.05 seconds to destroy the instance on the hypervisor. [ 595.984787] env[61905]: DEBUG oslo.service.loopingcall [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.985296] env[61905]: DEBUG nova.compute.manager [-] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 595.985929] env[61905]: DEBUG nova.network.neutron [-] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 596.020429] env[61905]: DEBUG nova.network.neutron [-] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.052117] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Successfully created port: 128481ba-c3df-415b-a586-ad5e47af072b {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.108872] env[61905]: DEBUG oslo_concurrency.lockutils [None req-19ee0b14-735a-424d-b52d-c0b7fc461ce8 tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.110513] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 596.112840] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.691s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.498148] env[61905]: DEBUG nova.network.neutron [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.524794] env[61905]: DEBUG nova.network.neutron [-] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.674164] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquiring lock "8d2cb485-32da-4fe7-8462-d98c071a6310" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.674487] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "8d2cb485-32da-4fe7-8462-d98c071a6310" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.683956] env[61905]: DEBUG nova.network.neutron [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.027032] env[61905]: INFO nova.compute.manager [-] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Took 1.04 seconds to deallocate network for instance.
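The Acquiring/acquired/released lock lines throughout this run (e.g. "compute_resources" above, waited 17.645s in one case, held 1.013s in another) are emitted by oslo_concurrency.lockutils, which the resource tracker uses to serialize claims. A minimal sketch of the two usual forms; do_claim is a hypothetical body:

    from oslo_concurrency import lockutils

    def do_claim():
        pass  # hypothetical claim/abort body

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Decorator form: the "waited"/"held" durations in the log
        # are measured around this wrapper.
        do_claim()

    def abort_instance_claim():
        # Context-manager form of the same named lock.
        with lockutils.lock('compute_resources'):
            do_claim()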
[ 597.031194] env[61905]: DEBUG nova.compute.claims [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 597.031376] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.075130] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0523f977-4552-4a57-ad10-84bd0308a5e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.084830] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2055748-a51f-4048-810c-23cb1bef69a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.115684] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188d19e4-de90-45bc-a07b-2c4433a1ca5d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.124117] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3092cd-1f59-484b-bacf-e6b6ef16e553 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.129199] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.144449] env[61905]: DEBUG nova.compute.provider_tree [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.165493] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.165708] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.165885] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.166118] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.166990] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.166990] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 597.166990] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905)
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.166990] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.166990] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.167268] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.167268] env[61905]: DEBUG nova.virt.hardware [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.168026] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37dfe8d-9a8d-496f-b144-10491a98190e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.179217] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228f1bb8-215d-4023-a513-339eccb6a550 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.192199] env[61905]: DEBUG oslo_concurrency.lockutils [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] Releasing lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.192444] env[61905]: DEBUG nova.compute.manager [req-642a8a90-df55-4fc8-8eba-bf897fbb6df2 req-85fc873d-2046-4e40-a4f7-8bc1dda16b4a service nova] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Received event network-vif-deleted-1ccbfe16-12b2-430b-a831-824fc133a512 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.269957] env[61905]: ERROR nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. 
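This is the second "Instance failed network setup after 1 attempt(s)" error (the first was at 594.816480). The attempt counter comes from a bounded retry loop around allocate_for_instance; with no retries configured there is exactly one attempt. A sketch of that shape, assuming a nova-style retries setting rather than quoting its implementation:

    def allocate_with_retries(allocate, retries=0):
        # retries=0 means a single attempt, matching the log above.
        attempts = retries + 1
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception:
                if attempt == attempts:
                    # Surfaces as "Instance failed network setup
                    # after %d attempt(s)" in the log.
                    raise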
[ 597.269957] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.269957] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.269957] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.269957] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.269957] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.269957] env[61905]: ERROR nova.compute.manager raise self.value [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.269957] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.269957] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.269957] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.271113] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.271113] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.271113] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. 
[ 597.271113] env[61905]: ERROR nova.compute.manager [ 597.271113] env[61905]: Traceback (most recent call last): [ 597.271113] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.271113] env[61905]: listener.cb(fileno) [ 597.271113] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.271113] env[61905]: result = function(*args, **kwargs) [ 597.271113] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.271113] env[61905]: return func(*args, **kwargs) [ 597.271113] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.271113] env[61905]: raise e [ 597.271113] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.271113] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 597.271113] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.271113] env[61905]: created_port_ids = self._update_ports_for_instance( [ 597.271113] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.271113] env[61905]: with excutils.save_and_reraise_exception(): [ 597.271113] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.271113] env[61905]: self.force_reraise() [ 597.271113] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.271113] env[61905]: raise self.value [ 597.271113] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.271113] env[61905]: updated_port = self._update_port( [ 597.271113] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.271113] env[61905]: _ensure_no_port_binding_failure(port) [ 597.271113] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.271113] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.272020] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. [ 597.272020] env[61905]: Removing descriptor: 18 [ 597.272020] env[61905]: ERROR nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. 
[ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] Traceback (most recent call last): [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] yield resources [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.driver.spawn(context, instance, image_meta, [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.272020] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] vm_ref = self.build_virtual_machine(instance, [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] for vif in network_info: [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self._sync_wrapper(fn, *args, **kwargs) [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.wait() [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self[:] = self._gt.wait() [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self._exit_event.wait() [ 597.272288] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.272676] env[61905]: ERROR 
nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] result = hub.switch() [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self.greenlet.switch() [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] result = function(*args, **kwargs) [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return func(*args, **kwargs) [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise e [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] nwinfo = self.network_api.allocate_for_instance( [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 597.272676] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] created_port_ids = self._update_ports_for_instance( [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] with excutils.save_and_reraise_exception(): [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.force_reraise() [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise self.value [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] updated_port = self._update_port( [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.272998] 
env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] _ensure_no_port_binding_failure(port) [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.272998] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise exception.PortBindingFailed(port_id=port['id']) [ 597.273254] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. [ 597.273254] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] [ 597.273254] env[61905]: INFO nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Terminating instance [ 597.273499] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.273606] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquired lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.273765] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.647617] env[61905]: DEBUG nova.scheduler.client.report [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.792485] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.864687] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.000989] env[61905]: DEBUG nova.compute.manager [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Received event network-changed-128481ba-c3df-415b-a586-ad5e47af072b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 598.000989] env[61905]: DEBUG nova.compute.manager [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Refreshing instance network info cache due to event network-changed-128481ba-c3df-415b-a586-ad5e47af072b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 598.000989] env[61905]: DEBUG oslo_concurrency.lockutils [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] Acquiring lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.153044] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.040s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.153692] env[61905]: ERROR nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. 
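The tracebacks in this section all bottom out in the same frame: nova/network/neutron.py line 294, where _ensure_no_port_binding_failure raises PortBindingFailed after Neutron reports that the port binding did not succeed. A minimal sketch of that check, reconstructed from the frames in the log (the helper and exception names come from the traceback itself; the binding:vif_type test is an assumption about how a failed binding is signalled, not verbatim Nova source):

    # Illustrative reconstruction of the final frames in the tracebacks above.
    # Assumption: Neutron marks a failed binding by setting the port's
    # 'binding:vif_type' attribute to 'binding_failed'.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns from the port update call.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

This also explains why the exception surfaces inside network_info iteration rather than at the Neutron call site: _allocate_network_async runs in a greenthread, and the error is re-raised when the VMware spawn path first iterates the network info (the _sync_wrapper/wait frames above).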
[ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Traceback (most recent call last): [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.driver.spawn(context, instance, image_meta, [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] vm_ref = self.build_virtual_machine(instance, [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.153692] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] for vif in network_info: [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self._sync_wrapper(fn, *args, **kwargs) [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.wait() [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self[:] = self._gt.wait() [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self._exit_event.wait() [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] result = hub.switch() [ 598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
598.154237] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return self.greenlet.switch() [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] result = function(*args, **kwargs) [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] return func(*args, **kwargs) [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise e [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] nwinfo = self.network_api.allocate_for_instance( [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] created_port_ids = self._update_ports_for_instance( [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] with excutils.save_and_reraise_exception(): [ 598.154583] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] self.force_reraise() [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise self.value [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] updated_port = self._update_port( [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] _ensure_no_port_binding_failure(port) [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] raise exception.PortBindingFailed(port_id=port['id']) [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] nova.exception.PortBindingFailed: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. [ 598.154925] env[61905]: ERROR nova.compute.manager [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] [ 598.155226] env[61905]: DEBUG nova.compute.utils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 598.155457] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.057s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.160729] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Build of instance 46ce0987-e757-4ec7-9f85-bd84e50f2324 was re-scheduled: Binding failed for port e483ef2f-6b51-4fda-bd1d-68909acd61e1, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 598.161044] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 598.161269] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquiring lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.161415] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Acquired lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.161571] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.367342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Releasing lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.367774] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 598.367968] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 598.368305] env[61905]: DEBUG oslo_concurrency.lockutils [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] Acquired lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.368480] env[61905]: DEBUG nova.network.neutron [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Refreshing network info cache for port 128481ba-c3df-415b-a586-ad5e47af072b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.369676] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b8e28c0-f257-4dd4-8804-aff11b623ed0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.381143] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4156f59b-1ab8-472c-8522-3a313779635d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.406565] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9ce5207-c493-4924-8371-db65cf359523 could not be found. [ 598.406802] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 598.406980] env[61905]: INFO nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Took 0.04 seconds to destroy the instance on the hypervisor. [ 598.407251] env[61905]: DEBUG oslo.service.loopingcall [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.407532] env[61905]: DEBUG nova.compute.manager [-] [instance: a9ce5207-c493-4924-8371-db65cf359523] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 598.407644] env[61905]: DEBUG nova.network.neutron [-] [instance: a9ce5207-c493-4924-8371-db65cf359523] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 598.425386] env[61905]: DEBUG nova.network.neutron [-] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.503464] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquiring lock "6783654c-4f87-4353-b9ba-1299158eba3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.503706] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "6783654c-4f87-4353-b9ba-1299158eba3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.685210] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.759789] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.891801] env[61905]: DEBUG nova.network.neutron [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.928308] env[61905]: DEBUG nova.network.neutron [-] [instance: a9ce5207-c493-4924-8371-db65cf359523] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.947623] env[61905]: DEBUG nova.network.neutron [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.079570] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c35525-bf1d-4d50-a303-01ace2def8d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.087966] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fffafe-3eb1-40d0-8aa2-ad24523c298f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.118054] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae94371-7430-4965-8e72-6230f1ec0e3f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.126208] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7f3ed9-a3b0-4e50-8c95-72ecf03e0464 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.139915] env[61905]: DEBUG nova.compute.provider_tree [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.262164] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Releasing lock "refresh_cache-46ce0987-e757-4ec7-9f85-bd84e50f2324" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.262404] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 599.262603] env[61905]: DEBUG nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.262759] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.291410] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 599.432164] env[61905]: INFO nova.compute.manager [-] [instance: a9ce5207-c493-4924-8371-db65cf359523] Took 1.02 seconds to deallocate network for instance. [ 599.434819] env[61905]: DEBUG nova.compute.claims [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.435049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.452797] env[61905]: DEBUG oslo_concurrency.lockutils [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] Releasing lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.453070] env[61905]: DEBUG nova.compute.manager [req-5de3751d-660b-42ca-b2aa-1c2521c16c30 req-7024f5eb-a9ed-4b06-9c88-8638996ce741 service nova] [instance: a9ce5207-c493-4924-8371-db65cf359523] Received event network-vif-deleted-128481ba-c3df-415b-a586-ad5e47af072b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 599.644053] env[61905]: DEBUG nova.scheduler.client.report [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 599.794281] env[61905]: DEBUG nova.network.neutron [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.149970] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.994s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.152705] env[61905]: ERROR nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Traceback (most recent call last): [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.driver.spawn(context, instance, image_meta, [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] vm_ref = self.build_virtual_machine(instance, [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.152705] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] for vif in network_info: [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self._sync_wrapper(fn, *args, **kwargs) [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 
195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.wait() [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self[:] = self._gt.wait() [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self._exit_event.wait() [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] result = hub.switch() [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.153055] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return self.greenlet.switch() [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] result = function(*args, **kwargs) [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] return func(*args, **kwargs) [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise e [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] nwinfo = self.network_api.allocate_for_instance( [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] created_port_ids = self._update_ports_for_instance( [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] with excutils.save_and_reraise_exception(): [ 600.153399] env[61905]: ERROR nova.compute.manager [instance: 
195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] self.force_reraise() [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise self.value [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] updated_port = self._update_port( [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] _ensure_no_port_binding_failure(port) [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] raise exception.PortBindingFailed(port_id=port['id']) [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] nova.exception.PortBindingFailed: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. [ 600.153761] env[61905]: ERROR nova.compute.manager [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] [ 600.154083] env[61905]: DEBUG nova.compute.utils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.154529] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Build of instance 195ac5ee-8da4-41e9-8c1b-291ea09c80c6 was re-scheduled: Binding failed for port c01e0a2a-db66-4e6a-b17f-43cba56f21b6, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 600.155017] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 600.155319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquiring lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.155531] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Acquired lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.155751] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.156972] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.801s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.297973] env[61905]: INFO nova.compute.manager [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] [instance: 46ce0987-e757-4ec7-9f85-bd84e50f2324] Took 1.03 seconds to deallocate network for instance. [ 600.679068] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.760450] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.034521] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbe082b-c695-44fb-8a48-aef768d2c179 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.044048] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a367b2d-6e1e-4620-b442-07f4e4f621fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.073884] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e89cb8-9c8f-468c-935b-aef52a35d8c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.082417] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cd7286-fe26-4fac-910e-7877e0f4ace0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.096268] env[61905]: DEBUG nova.compute.provider_tree [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.262676] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Releasing lock "refresh_cache-195ac5ee-8da4-41e9-8c1b-291ea09c80c6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.262974] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 601.263121] env[61905]: DEBUG nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 601.263301] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.277956] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.326192] env[61905]: INFO nova.scheduler.client.report [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Deleted allocations for instance 46ce0987-e757-4ec7-9f85-bd84e50f2324 [ 601.601837] env[61905]: DEBUG nova.scheduler.client.report [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.780279] env[61905]: DEBUG nova.network.neutron [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.838720] env[61905]: DEBUG oslo_concurrency.lockutils [None req-771c0435-3576-452d-9edd-caa8bc9c444a tempest-VolumesAssistedSnapshotsTest-489112387 tempest-VolumesAssistedSnapshotsTest-489112387-project-member] Lock "46ce0987-e757-4ec7-9f85-bd84e50f2324" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.618s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.107639] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.951s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.108419] env[61905]: ERROR nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Traceback (most recent call last): [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.driver.spawn(context, instance, image_meta, [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] vm_ref = self.build_virtual_machine(instance, [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.108419] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] for vif in network_info: [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self._sync_wrapper(fn, *args, **kwargs) [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.wait() [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self[:] = self._gt.wait() [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self._exit_event.wait() [ 602.108799] 
env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] result = hub.switch() [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.108799] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return self.greenlet.switch() [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] result = function(*args, **kwargs) [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] return func(*args, **kwargs) [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise e [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] nwinfo = self.network_api.allocate_for_instance( [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] created_port_ids = self._update_ports_for_instance( [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] with excutils.save_and_reraise_exception(): [ 602.109092] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] self.force_reraise() [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise self.value [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] updated_port 
= self._update_port( [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] _ensure_no_port_binding_failure(port) [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] raise exception.PortBindingFailed(port_id=port['id']) [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] nova.exception.PortBindingFailed: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. [ 602.109371] env[61905]: ERROR nova.compute.manager [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] [ 602.109614] env[61905]: DEBUG nova.compute.utils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 602.110320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.291s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.113240] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Build of instance b788bb84-07b9-4407-9e6e-cac6510166b7 was re-scheduled: Binding failed for port 6463419a-5ed8-4493-9d1c-150d40412efc, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 602.113651] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 602.113879] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.114034] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquired lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.114193] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.283639] env[61905]: INFO nova.compute.manager [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] [instance: 195ac5ee-8da4-41e9-8c1b-291ea09c80c6] Took 1.02 seconds to deallocate network for instance. [ 602.342314] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 602.635345] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance cache missing network info. 
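The traceback above bottoms out in nova's port-binding sanity check: after Neutron returns the updated port, nova raises PortBindingFailed when the binding came back in the failed state, which is what triggers the "was re-scheduled" entry. A minimal sketch of that check, assuming dict-shaped ports and the 'binding_failed' sentinel; the real code is _ensure_no_port_binding_failure() in nova/network/neutron.py, and this simplified version is not the exact upstream implementation:

```python
# Sketch of the check that raised PortBindingFailed above. The class and the
# port dicts are simplified stand-ins, not nova's exact code.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please check "
                         "neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a failed binding by setting the port's vif_type to the
    # sentinel value 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])

ensure_no_port_binding_failure({'id': 'ok', 'binding:vif_type': 'ovs'})  # passes
try:
    ensure_no_port_binding_failure(
        {'id': '6463419a-5ed8-4493-9d1c-150d40412efc',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR lines above
```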
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.727833] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.866411] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.954025] env[61905]: DEBUG nova.compute.manager [None req-dc2a3cca-55be-45b0-b9fc-b9c9f1699291 tempest-ServerDiagnosticsV248Test-1901360014 tempest-ServerDiagnosticsV248Test-1901360014-project-admin] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 602.954025] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7be506-60b3-49a4-b694-21934b13e680 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.967275] env[61905]: INFO nova.compute.manager [None req-dc2a3cca-55be-45b0-b9fc-b9c9f1699291 tempest-ServerDiagnosticsV248Test-1901360014 tempest-ServerDiagnosticsV248Test-1901360014-project-admin] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Retrieving diagnostics [ 602.968497] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4f9bfd-c9e1-4ae7-a82b-1ea4b1b53862 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.036062] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31de04b3-19f3-4f68-866b-f98130d15b11 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.043989] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f42102-739d-4405-a138-ada6bf130523 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.073463] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aff98e7-b037-4a07-ac05-927efe91d9af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.081405] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf6cc38-4413-4f6e-b312-9fd0774b2321 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.095482] env[61905]: DEBUG nova.compute.provider_tree [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
603.231643] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Releasing lock "refresh_cache-b788bb84-07b9-4407-9e6e-cac6510166b7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.231901] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 603.232647] env[61905]: DEBUG nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 603.232647] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.250480] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.322299] env[61905]: INFO nova.scheduler.client.report [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Deleted allocations for instance 195ac5ee-8da4-41e9-8c1b-291ea09c80c6 [ 603.602027] env[61905]: DEBUG nova.scheduler.client.report [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 603.753597] env[61905]: DEBUG nova.network.neutron [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.831724] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f7621b0-0b90-40fb-91a1-0d54191d4f29 tempest-ServersWithSpecificFlavorTestJSON-456201941 tempest-ServersWithSpecificFlavorTestJSON-456201941-project-member] Lock 
"195ac5ee-8da4-41e9-8c1b-291ea09c80c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.340s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.085849] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "2a8bcc04-5519-4890-839b-64dcf422526d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.086150] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.086365] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "2a8bcc04-5519-4890-839b-64dcf422526d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.086543] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.086706] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.088899] env[61905]: INFO nova.compute.manager [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Terminating instance [ 604.090559] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "refresh_cache-2a8bcc04-5519-4890-839b-64dcf422526d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.090776] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquired lock "refresh_cache-2a8bcc04-5519-4890-839b-64dcf422526d" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.090873] env[61905]: DEBUG nova.network.neutron [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.106107] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.106726] env[61905]: ERROR nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Traceback (most recent call last): [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.driver.spawn(context, instance, image_meta, [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] vm_ref = self.build_virtual_machine(instance, [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.106726] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] for vif in network_info: [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self._sync_wrapper(fn, *args, **kwargs) [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 
88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.wait() [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self[:] = self._gt.wait() [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self._exit_event.wait() [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] result = hub.switch() [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.107053] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return self.greenlet.switch() [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] result = function(*args, **kwargs) [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] return func(*args, **kwargs) [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise e [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] nwinfo = self.network_api.allocate_for_instance( [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] created_port_ids = self._update_ports_for_instance( [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] with excutils.save_and_reraise_exception(): [ 604.107494] env[61905]: ERROR nova.compute.manager [instance: 
88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] self.force_reraise() [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise self.value [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] updated_port = self._update_port( [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] _ensure_no_port_binding_failure(port) [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] raise exception.PortBindingFailed(port_id=port['id']) [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] nova.exception.PortBindingFailed: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. [ 604.107837] env[61905]: ERROR nova.compute.manager [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] [ 604.108175] env[61905]: DEBUG nova.compute.utils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. 
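The `with excutils.save_and_reraise_exception():` frames that recur in these tracebacks are an oslo.utils pattern: run cleanup after a failure while guaranteeing the original exception (here PortBindingFailed) is re-raised unchanged, which is where the force_reraise() and `raise self.value` frames come from. A sketch of the pattern; `_update_port` below is a hypothetical stub standing in for the real Neutron call:

```python
from oslo_utils import excutils

def _update_port(port_id):
    # Hypothetical stub that fails the way the log does.
    raise RuntimeError(f'Binding failed for port {port_id}')

def update_ports_for_instance(port_ids):
    created = []
    for port_id in port_ids:
        try:
            created.append(_update_port(port_id))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs with the exception saved; leaving the block
                # re-raises it unchanged (force_reraise -> raise self.value).
                created.clear()

try:
    update_ports_for_instance(['040dcde9-8647-4d14-8b86-fa3902e34d2a'])
except RuntimeError as exc:
    print(exc)  # the original failure survives the cleanup
```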
{{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 604.109433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.190s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.109433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.109433] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 604.109433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.744s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.112180] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Build of instance 88c496a6-8007-4111-8ac1-6e0f8680ef24 was re-scheduled: Binding failed for port 040dcde9-8647-4d14-8b86-fa3902e34d2a, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 604.112623] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 604.113211] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquiring lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.113371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Acquired lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.113528] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 604.115114] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d01c7b-9466-4eba-bac3-e737dac4e095 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.128382] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469c87c8-9eea-41b1-9512-8c046f0cfe0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.148344] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c875e54-b3b0-4f09-a325-6f7c36ec9ccb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.156143] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103d6c31-097d-4095-b6a4-0b7fbcf3cd81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.185314] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181480MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 604.185466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.258233] env[61905]: INFO nova.compute.manager [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 
tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: b788bb84-07b9-4407-9e6e-cac6510166b7] Took 1.03 seconds to deallocate network for instance. [ 604.333544] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.622506] env[61905]: DEBUG nova.network.neutron [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.655756] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.768112] env[61905]: DEBUG nova.network.neutron [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.856589] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.863634] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.105885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b944114-ef7f-4bf7-9381-a445291ad152 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.114385] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3105c86-6563-497b-8405-d283406dbc53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.149131] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a29776-529e-4693-8fee-7c57471088cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.157669] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab95415-1a2a-4163-a77f-25e1248a31e3 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.175058] env[61905]: DEBUG nova.compute.provider_tree [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.275408] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Releasing lock "refresh_cache-2a8bcc04-5519-4890-839b-64dcf422526d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.275786] env[61905]: DEBUG nova.compute.manager [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 605.276014] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.277081] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f191e958-e298-45f6-bb31-77902252e626 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.285532] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 605.285650] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69bfed98-5acb-46c0-bc19-f50c99589842 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.293574] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 605.293574] env[61905]: value = "task-1362308" [ 605.293574] env[61905]: _type = "Task" [ 605.293574] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.305356] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362308, 'name': PowerOffVM_Task} progress is 0%. 
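The "Waiting for the task ... progress is 0%" entries are oslo.vmware's wait_for_task() polling the vCenter task object until it completes. A generic poller in the same style, built on oslo.service's FixedIntervalLoopingCall (the mechanism oslo.vmware itself uses internally); `get_task_info` is a stand-in callable for the real property-collector read, not an oslo.vmware API:

```python
from oslo_service import loopingcall

def wait_for_task(get_task_info, interval=0.5):
    """Poll a task until it leaves the running state, in the style of
    oslo.vmware's wait_for_task(). get_task_info is a stand-in callable."""
    def _poll():
        info = get_task_info()
        if info['state'] == 'success':
            raise loopingcall.LoopingCallDone(retvalue=info.get('result'))
        if info['state'] == 'error':
            raise loopingcall.LoopingCallDone(
                retvalue=RuntimeError(info.get('error', 'task failed')))
        # Any other state ('queued', 'running') keeps the loop going,
        # which is when the "progress is 0%" entries above get logged.
    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=interval).wait()
    if isinstance(result, Exception):
        raise result
    return result

states = iter([{'state': 'running'}, {'state': 'success', 'result': 'done'}])
print(wait_for_task(lambda: next(states), interval=0.01))  # -> done
```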
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.306488] env[61905]: INFO nova.scheduler.client.report [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Deleted allocations for instance b788bb84-07b9-4407-9e6e-cac6510166b7 [ 605.359904] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Releasing lock "refresh_cache-88c496a6-8007-4111-8ac1-6e0f8680ef24" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.359904] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 605.360303] env[61905]: DEBUG nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 605.360303] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.389842] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Instance cache missing network info. 
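The recurring `Lock "compute_resources" acquired/"released"` entries, with their waited/held timings, are emitted by oslo.concurrency's lockutils (the `inner` wrapper in lockutils.py cited in each entry). Nova reaches it through its own helper, but the underlying primitive is the synchronized decorator: every function synchronized on the same name shares one lock, serializing resource-tracker updates. A minimal sketch, with an illustrative function body:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Runs with the named lock held, so it cannot interleave with other
    # callers synchronized on 'compute_resources' (abort_instance_claim,
    # update_usage, ...); lockutils logs the waited/held durations.
    return instance_uuid

print(instance_claim('e997db40-b3a6-4c06-8991-cdb96954c0ca'))
```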
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.035168] env[61905]: DEBUG nova.scheduler.client.report [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 606.038686] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8f3fa050-381c-4eb8-b9de-d5bc85769102 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "b788bb84-07b9-4407-9e6e-cac6510166b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.113s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.039243] env[61905]: DEBUG nova.network.neutron [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.053431] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362308, 'name': PowerOffVM_Task, 'duration_secs': 0.123825} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.054111] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.054283] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.054528] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80236cbc-d2be-449e-aad9-ecefc8a17a8a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.088267] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 606.088267] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 606.088267] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Deleting the datastore file [datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.088267] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7169911f-006c-42e7-aaeb-55f0a88fe7e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.092839] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for the task: (returnval){ [ 606.092839] env[61905]: value = "task-1362310" [ 606.092839] env[61905]: _type = "Task" [ 606.092839] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.102253] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
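The entries from 605.285 through 606.102 show the vmwareapi teardown sequence for instance 2a8bcc04: power off the VM (PowerOffVM_Task), unregister it from the vCenter inventory (UnregisterVM), then delete its datastore directory (DeleteDatastoreFile_Task), waiting on each task in turn. A hedged sketch of that sequence over oslo.vmware's session API; `session`, `vm_ref`, `datacenter` and `ds_path` are assumed to come from the surrounding driver code, and this is not nova's exact implementation:

```python
# Sketch of the teardown sequence visible above, assuming an established
# oslo.vmware VMwareAPISession. Not the exact nova vmops/ds_util code.
def destroy_vm(session, vm_ref, datacenter, ds_path):
    # 1. Power off the VM; PowerOffVM_Task returns a task to wait on.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # 2. Unregister removes the VM from the vCenter inventory (no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 3. Delete the VM's files from the datastore, e.g.
    #    "[datastore2] 2a8bcc04-5519-4890-839b-64dcf422526d" above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter)
    session.wait_for_task(task)
```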
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.542456] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.433s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.543127] env[61905]: ERROR nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information. [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Traceback (most recent call last): [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.driver.spawn(context, instance, image_meta, [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] vm_ref = self.build_virtual_machine(instance, [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] vif_infos = vmwarevif.get_vif_info(self._session, [ 606.543127] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] for vif in network_info: [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self._sync_wrapper(fn, *args, **kwargs) [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.wait() [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 606.543453] env[61905]: ERROR 
nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self[:] = self._gt.wait() [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self._exit_event.wait() [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] result = hub.switch() [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 606.543453] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return self.greenlet.switch() [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] result = function(*args, **kwargs) [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] return func(*args, **kwargs) [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise e [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] nwinfo = self.network_api.allocate_for_instance( [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] created_port_ids = self._update_ports_for_instance( [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] with excutils.save_and_reraise_exception(): [ 606.543880] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] self.force_reraise() [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise self.value [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] updated_port = self._update_port( [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] _ensure_no_port_binding_failure(port) [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] raise exception.PortBindingFailed(port_id=port['id']) [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] nova.exception.PortBindingFailed: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information. [ 606.544214] env[61905]: ERROR nova.compute.manager [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] [ 606.544640] env[61905]: DEBUG nova.compute.utils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 606.548024] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.273s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.548024] env[61905]: DEBUG nova.objects.instance [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 606.548370] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Build of instance 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e was re-scheduled: Binding failed for port c5ee59c6-83ee-4c77-b728-0da1d37e65ac, please check neutron logs for more information. 
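Each "was re-scheduled" entry is followed by the same cleanup choreography: nova tries to unplug VIFs, but the VMware driver does not implement unplug_vifs (hence the repeated "Virt driver does not provide unplug_vifs method" message), so it proceeds straight to deallocating the instance's Neutron resources before handing the request back to the scheduler. A loose control-flow sketch with stand-in names, not nova's real signatures:

```python
# Loose sketch of the cleanup path logged after each re-schedule; the
# helper names and signatures below are illustrative only.

class NoUnplugDriver:
    def unplug_vifs(self, instance):
        raise NotImplementedError  # the VMware driver behaves like this

def deallocate_network(instance):
    print(f'Deallocating network for instance {instance}')

def cleanup_allocated_networks(driver, instance):
    try:
        driver.unplug_vifs(instance)
    except NotImplementedError:
        # Matches the "does not provide unplug_vifs method" entries above.
        pass
    deallocate_network(instance)  # then the scheduler picks a new host

cleanup_allocated_networks(NoUnplugDriver(),
                           'b788bb84-07b9-4407-9e6e-cac6510166b7')
```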
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 606.548807] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 606.549047] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquiring lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.549228] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Acquired lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.549394] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.550499] env[61905]: INFO nova.compute.manager [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] [instance: 88c496a6-8007-4111-8ac1-6e0f8680ef24] Took 1.19 seconds to deallocate network for instance. [ 606.555259] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 606.604775] env[61905]: DEBUG oslo_vmware.api [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Task: {'id': task-1362310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119996} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.605198] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.605466] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 606.606781] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.606781] env[61905]: INFO nova.compute.manager [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Took 1.33 seconds to destroy the instance on the hypervisor. [ 606.606781] env[61905]: DEBUG oslo.service.loopingcall [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.606781] env[61905]: DEBUG nova.compute.manager [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.606781] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.628132] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.091589] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.101043] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.133418] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.314193] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.567149] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8140910a-edbd-4674-8111-28b6efb28d98 tempest-ServersAdmin275Test-1397741078 tempest-ServersAdmin275Test-1397741078-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.022s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.568267] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.494s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.570885] env[61905]: INFO nova.compute.claims [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.595701] env[61905]: INFO nova.scheduler.client.report [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Deleted allocations for instance 88c496a6-8007-4111-8ac1-6e0f8680ef24 [ 607.636100] env[61905]: INFO nova.compute.manager [-] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Took 1.03 seconds to deallocate network for instance. [ 607.819120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Releasing lock "refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.819656] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 607.819910] env[61905]: DEBUG nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 607.820166] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 607.851171] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.107579] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3384d742-275a-46c0-a6ff-1ebfb10f7aa7 tempest-ImagesOneServerNegativeTestJSON-1650893753 tempest-ImagesOneServerNegativeTestJSON-1650893753-project-member] Lock "88c496a6-8007-4111-8ac1-6e0f8680ef24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.204s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.142881] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.354262] env[61905]: DEBUG nova.network.neutron [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.613380] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 608.857250] env[61905]: INFO nova.compute.manager [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] [instance: 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e] Took 1.04 seconds to deallocate network for instance. 
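The "Acquiring lock", "Lock ... acquired ... waited Ns" and "Lock ... "released" ... held Ns" records throughout this section are emitted by oslo.concurrency's lockutils wrappers, not by Nova itself. A minimal sketch of the two usage patterns behind them, assuming only a stock oslo.concurrency install; the lock names are copied from the log, but the function bodies are illustrative, not Nova's code:

    from oslo_concurrency import lockutils

    # Decorator form: the generated wrapper ("inner" in the lockutils.py
    # paths above) logs how long the caller waited for, and then held,
    # the named in-process lock.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        ...  # runs with the lock held

    # Context-manager form, matching the Acquiring/Acquired/Releasing
    # triplets around the "refresh_cache-<uuid>" lock above.
    with lockutils.lock('refresh_cache-6b3d7973-c1bf-41bc-9f03-0dd3371bb71e'):
        ...  # rebuild the instance's network info cache

The long waits visible here (e.g. "waited 15.494s" against "held 74.204s" for "compute_resources") are the usual signature of many concurrent builds serializing on one resource-tracker lock.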
[ 609.004761] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8457e6d5-cc7f-4589-a226-feac38b6435c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.013392] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ea94bc-afe0-400f-bcda-938e69b62a03 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.047384] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7292cc2d-c971-4e57-a5ad-73f2038b28c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.055346] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055ebfb2-4bf9-427e-ac94-b1527e989aaa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.070158] env[61905]: DEBUG nova.compute.provider_tree [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.135402] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.574304] env[61905]: DEBUG nova.scheduler.client.report [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.894476] env[61905]: INFO nova.scheduler.client.report [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Deleted allocations for instance 6b3d7973-c1bf-41bc-9f03-0dd3371bb71e [ 610.079763] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.080311] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: 
e997db40-b3a6-4c06-8991-cdb96954c0ca] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.082848] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.007s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.083078] env[61905]: DEBUG nova.objects.instance [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lazy-loading 'resources' on Instance uuid 72770472-1b79-4408-b32c-34e56fd27c45 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 610.404345] env[61905]: DEBUG oslo_concurrency.lockutils [None req-232ae569-293c-4ea8-80aa-c01aec63a1d8 tempest-ServersAdminTestJSON-153392001 tempest-ServersAdminTestJSON-153392001-project-member] Lock "6b3d7973-c1bf-41bc-9f03-0dd3371bb71e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.239s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.592960] env[61905]: DEBUG nova.compute.utils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.599150] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.599150] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.665347] env[61905]: DEBUG nova.policy [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2249801e5448a89ba2d7d529d6f795', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cf3e2e1dfd944c49e9360d09484525c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.907554] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 611.090273] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5568636e-1767-4fdf-96fd-eb3aed4535d8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.098431] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab4939e-a581-43ad-9d1f-d1c1e1a123c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.104869] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.136877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9f10c1-71d5-4840-9e44-a49d7db439d6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.146445] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1fb7f7-9d72-449b-81e0-9f27cef5bc58 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.152113] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Successfully created port: 97bce231-0da1-492b-8afa-feda650f65cb {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.166361] env[61905]: DEBUG nova.compute.provider_tree [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.434292] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.670670] env[61905]: DEBUG nova.scheduler.client.report [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.116624] env[61905]: DEBUG nova.compute.manager [None 
req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.140714] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.140981] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.141196] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.141333] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.142096] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.142291] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.142540] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.142714] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 
tempest-ServerAddressesTestJSON-136838363-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.142882] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.143745] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.143953] env[61905]: DEBUG nova.virt.hardware [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.146464] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a115b7-564c-4882-848d-69b07a9cd445 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.157538] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43139c6c-d28b-44de-8b03-ced039d5cbde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.180762] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.183985] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.048s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.185623] env[61905]: INFO nova.compute.claims [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.231242] env[61905]: INFO nova.scheduler.client.report [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Deleted allocations for instance 72770472-1b79-4408-b32c-34e56fd27c45 [ 612.484086] env[61905]: DEBUG nova.compute.manager [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Received event network-changed-97bce231-0da1-492b-8afa-feda650f65cb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.484086] env[61905]: 
DEBUG nova.compute.manager [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Refreshing instance network info cache due to event network-changed-97bce231-0da1-492b-8afa-feda650f65cb. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 612.484086] env[61905]: DEBUG oslo_concurrency.lockutils [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] Acquiring lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.484086] env[61905]: DEBUG oslo_concurrency.lockutils [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] Acquired lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.484086] env[61905]: DEBUG nova.network.neutron [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Refreshing network info cache for port 97bce231-0da1-492b-8afa-feda650f65cb {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.742493] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f4a2681-b5b2-4fcc-ba20-2131eb39040c tempest-ServersAdmin275Test-981941344 tempest-ServersAdmin275Test-981941344-project-member] Lock "72770472-1b79-4408-b32c-34e56fd27c45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.489s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.105733] env[61905]: DEBUG nova.network.neutron [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.336193] env[61905]: ERROR nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. 
[ 613.336193] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.336193] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.336193] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.336193] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.336193] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.336193] env[61905]: ERROR nova.compute.manager raise self.value [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.336193] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 613.336193] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.336193] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 613.336774] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.336774] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 613.336774] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. 
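The traceback above and the two that follow all bottom out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. As a hedged reconstruction from the frames alone (not a verbatim copy of Nova's source): Neutron records a failed binding on the port itself as binding:vif_type = 'binding_failed', and Nova converts that marker into the PortBindingFailed seen here rather than spawning a VM with a dead VIF:

    from nova import exception

    def _ensure_no_port_binding_failure(port):
        # Neutron leaves 'binding_failed' (VIF_TYPE_BINDING_FAILED in
        # nova.network.model) on ports it could not bind; surface it
        # as a hard error so the build is aborted and rescheduled.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

This is why the error message points at the Neutron logs: the root cause (no mechanism driver could bind the port) is recorded on the Neutron side, and Nova only observes the result.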
[ 613.336774] env[61905]: ERROR nova.compute.manager [ 613.336774] env[61905]: Traceback (most recent call last): [ 613.336774] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 613.336774] env[61905]: listener.cb(fileno) [ 613.336774] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.336774] env[61905]: result = function(*args, **kwargs) [ 613.336774] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.336774] env[61905]: return func(*args, **kwargs) [ 613.336774] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.336774] env[61905]: raise e [ 613.336774] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.336774] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 613.336774] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.336774] env[61905]: created_port_ids = self._update_ports_for_instance( [ 613.336774] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.336774] env[61905]: with excutils.save_and_reraise_exception(): [ 613.336774] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.336774] env[61905]: self.force_reraise() [ 613.336774] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.336774] env[61905]: raise self.value [ 613.336774] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.336774] env[61905]: updated_port = self._update_port( [ 613.336774] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.336774] env[61905]: _ensure_no_port_binding_failure(port) [ 613.336774] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.336774] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 613.337530] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. [ 613.337530] env[61905]: Removing descriptor: 17 [ 613.337530] env[61905]: ERROR nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. 
[ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Traceback (most recent call last): [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] yield resources [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.driver.spawn(context, instance, image_meta, [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.337530] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] vm_ref = self.build_virtual_machine(instance, [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] for vif in network_info: [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self._sync_wrapper(fn, *args, **kwargs) [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.wait() [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self[:] = self._gt.wait() [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self._exit_event.wait() [ 613.337827] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 613.338169] env[61905]: ERROR 
nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] result = hub.switch() [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self.greenlet.switch() [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] result = function(*args, **kwargs) [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return func(*args, **kwargs) [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise e [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] nwinfo = self.network_api.allocate_for_instance( [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.338169] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] created_port_ids = self._update_ports_for_instance( [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] with excutils.save_and_reraise_exception(): [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.force_reraise() [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise self.value [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] updated_port = self._update_port( [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.338519] 
env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] _ensure_no_port_binding_failure(port) [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.338519] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise exception.PortBindingFailed(port_id=port['id']) [ 613.338828] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. [ 613.338828] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] [ 613.338828] env[61905]: INFO nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Terminating instance [ 613.340631] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquiring lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.398830] env[61905]: DEBUG nova.network.neutron [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.641614] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb935644-19ec-445a-a0a3-0d4b21fb0c61 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.651382] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2ce8ca-7503-475f-babd-d76bfe943522 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.687332] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb23f04-9c4f-40ae-82d9-b4808e5c8c20 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.697442] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6956ece9-fba2-4865-bd22-c689ea8b0663 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.714923] env[61905]: DEBUG nova.compute.provider_tree [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.902405] env[61905]: DEBUG oslo_concurrency.lockutils [req-e382cc90-3f46-496d-8671-0e971d20492a req-3d08b2f7-e4a6-4f79-b01e-9095b57f9299 service nova] Releasing lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.902882] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquired lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.903084] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.218474] env[61905]: DEBUG nova.scheduler.client.report [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.430405] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.564164] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.680087] env[61905]: DEBUG nova.compute.manager [req-2437ccdb-db8f-4181-b8ac-bcf369b19e27 req-d8db24d4-1ff1-4b4a-864d-9b84a2da81fe service nova] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Received event network-vif-deleted-97bce231-0da1-492b-8afa-feda650f65cb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 614.723514] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.733120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.493s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.067403] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Releasing lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.067827] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 615.068023] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.068341] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91abdb50-00dd-4b63-a9e7-afe7246d198b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.078604] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1774eb-15f1-49bd-9f50-eb524dc31d9f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.106455] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e997db40-b3a6-4c06-8991-cdb96954c0ca could not be found. [ 615.106691] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.106878] env[61905]: INFO nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 615.107156] env[61905]: DEBUG oslo.service.loopingcall [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.107392] env[61905]: DEBUG nova.compute.manager [-] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 615.107478] env[61905]: DEBUG nova.network.neutron [-] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.134811] env[61905]: DEBUG nova.network.neutron [-] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.228180] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "4705efc3-ba73-4b5b-a698-06e34f5b651e" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.228461] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "4705efc3-ba73-4b5b-a698-06e34f5b651e" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.639276] env[61905]: DEBUG nova.network.neutron [-] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.726145] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8746a1ed-0524-4913-bb46-55fd753dabf6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.735837] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952dc9a2-0920-452e-871a-0d12e981243f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.741124] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "4705efc3-ba73-4b5b-a698-06e34f5b651e" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.512s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.741523] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 615.779280] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8506f4-cea1-4ce0-886c-db29ddc66353 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.788561] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca86309c-1624-4557-8c0c-1de1760c0698 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.807443] env[61905]: DEBUG nova.compute.provider_tree [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.142548] env[61905]: INFO nova.compute.manager [-] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Took 1.03 seconds to deallocate network for instance. [ 616.145720] env[61905]: DEBUG nova.compute.claims [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.145720] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.250189] env[61905]: DEBUG nova.compute.utils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.250189] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 616.250189] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.311526] env[61905]: DEBUG nova.scheduler.client.report [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.354583] env[61905]: DEBUG nova.policy [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c1fbf237c1b4904b9c10ab110550f48', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91e646a0bdbc456788634737bd4cea65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.754020] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 616.820647] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.094s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.821282] env[61905]: ERROR nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. 
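The repeated "Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 ..." records dump the compute node's placement inventory verbatim. Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single request; a plain-arithmetic check of the logged numbers (values copied from the log, no Nova imports):

    # Inventory as reported in the records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 149,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: usable={usable:g}, per-request cap={inv['max_unit']}")
    # VCPU: usable=192, MEMORY_MB: usable=196078, DISK_GB: usable=400

So the 4.0 VCPU allocation ratio is what lets 48 physical cores back 192 schedulable vCPUs, while a single instance can still claim at most 16.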
[ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Traceback (most recent call last): [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.driver.spawn(context, instance, image_meta, [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] vm_ref = self.build_virtual_machine(instance, [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 616.821282] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] for vif in network_info: [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return self._sync_wrapper(fn, *args, **kwargs) [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.wait() [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self[:] = self._gt.wait() [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return self._exit_event.wait() [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] current.throw(*self._exc) [ 616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
616.821603] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] result = function(*args, **kwargs) [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] return func(*args, **kwargs) [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise e [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] nwinfo = self.network_api.allocate_for_instance( [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] created_port_ids = self._update_ports_for_instance( [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] with excutils.save_and_reraise_exception(): [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] self.force_reraise() [ 616.821941] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise self.value [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] updated_port = self._update_port( [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] _ensure_no_port_binding_failure(port) [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] raise exception.PortBindingFailed(port_id=port['id']) [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] nova.exception.PortBindingFailed: Binding failed for 
port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. [ 616.822337] env[61905]: ERROR nova.compute.manager [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] [ 616.822337] env[61905]: DEBUG nova.compute.utils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 616.824724] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.793s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.831815] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Build of instance 56997e40-ec60-422d-b58c-8a628d37b1bc was re-scheduled: Binding failed for port 2d69d1c6-de7b-44a1-943a-5aec1d543aa3, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 616.831815] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 616.831815] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.831815] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.831815] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.145339] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Successfully created port: 66c29463-feaf-46d1-833a-37b3a678e28a {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.403685] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.467536] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquiring lock "24f832e6-9704-4105-a17f-c5c77fa52d74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.467756] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "24f832e6-9704-4105-a17f-c5c77fa52d74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.695115] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.767076] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 617.802894] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.803582] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.803582] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.803582] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.804015] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.806299] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.806717] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.806929] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.807116] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf 
tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.807288] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.808382] env[61905]: DEBUG nova.virt.hardware [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.809381] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fab375-0800-4ffd-8663-17e0c71a3857 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.828649] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f8155e-fe06-4f2b-af83-8c6385a7aaeb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.903890] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20cc938-d96e-4b20-86b0-28a15b3866bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.912806] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7719946d-155b-4414-90de-a14e802588fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.956418] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50775882-9b90-4298-9984-085a9a5bf8bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.964172] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0ff609-a786-4187-b75e-84f4ffa9b406 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.979333] env[61905]: DEBUG nova.compute.provider_tree [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.203836] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-56997e40-ec60-422d-b58c-8a628d37b1bc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.204049] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs 
should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.204243] env[61905]: DEBUG nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.204413] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 618.244129] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.481375] env[61905]: DEBUG nova.scheduler.client.report [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 618.752187] env[61905]: DEBUG nova.network.neutron [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.989574] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.164s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.991184] env[61905]: ERROR nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. 
[ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Traceback (most recent call last): [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.driver.spawn(context, instance, image_meta, [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] vm_ref = self.build_virtual_machine(instance, [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.991184] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] for vif in network_info: [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self._sync_wrapper(fn, *args, **kwargs) [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.wait() [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self[:] = self._gt.wait() [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self._exit_event.wait() [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] result = hub.switch() [ 618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
618.991530] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return self.greenlet.switch() [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] result = function(*args, **kwargs) [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] return func(*args, **kwargs) [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise e [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] nwinfo = self.network_api.allocate_for_instance( [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] created_port_ids = self._update_ports_for_instance( [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] with excutils.save_and_reraise_exception(): [ 618.992023] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] self.force_reraise() [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise self.value [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] updated_port = self._update_port( [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] _ensure_no_port_binding_failure(port) [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] raise exception.PortBindingFailed(port_id=port['id']) [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] nova.exception.PortBindingFailed: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. [ 618.992410] env[61905]: ERROR nova.compute.manager [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] [ 618.992695] env[61905]: DEBUG nova.compute.utils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 618.996110] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.559s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.002850] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Build of instance 79c34bad-f29b-4d5e-97d5-6bfd6be55b31 was re-scheduled: Binding failed for port 1ccbfe16-12b2-430b-a831-824fc133a512, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 619.002850] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 619.002850] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquiring lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.002850] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Acquired lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.003065] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.255248] env[61905]: INFO nova.compute.manager [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 56997e40-ec60-422d-b58c-8a628d37b1bc] Took 1.05 seconds to deallocate network for instance. [ 619.443369] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "5bdd7f80-e321-475f-8132-7047a7f24c75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.443369] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "5bdd7f80-e321-475f-8132-7047a7f24c75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.537267] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.545099] env[61905]: ERROR nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. 
[ 619.545099] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.545099] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.545099] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.545099] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.545099] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.545099] env[61905]: ERROR nova.compute.manager raise self.value [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.545099] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 619.545099] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.545099] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 619.545553] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.545553] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 619.545553] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. 
[ 619.545553] env[61905]: ERROR nova.compute.manager [ 619.545553] env[61905]: Traceback (most recent call last): [ 619.545553] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 619.545553] env[61905]: listener.cb(fileno) [ 619.545553] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.545553] env[61905]: result = function(*args, **kwargs) [ 619.545553] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.545553] env[61905]: return func(*args, **kwargs) [ 619.545553] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.545553] env[61905]: raise e [ 619.545553] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.545553] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 619.545553] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.545553] env[61905]: created_port_ids = self._update_ports_for_instance( [ 619.545553] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.545553] env[61905]: with excutils.save_and_reraise_exception(): [ 619.545553] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.545553] env[61905]: self.force_reraise() [ 619.545553] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.545553] env[61905]: raise self.value [ 619.545553] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.545553] env[61905]: updated_port = self._update_port( [ 619.545553] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.545553] env[61905]: _ensure_no_port_binding_failure(port) [ 619.545553] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.545553] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 619.546338] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. [ 619.546338] env[61905]: Removing descriptor: 18 [ 619.546338] env[61905]: ERROR nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. 
[ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Traceback (most recent call last): [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] yield resources [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.driver.spawn(context, instance, image_meta, [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.546338] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] vm_ref = self.build_virtual_machine(instance, [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] for vif in network_info: [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self._sync_wrapper(fn, *args, **kwargs) [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.wait() [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self[:] = self._gt.wait() [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self._exit_event.wait() [ 619.546651] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 619.547093] env[61905]: ERROR 
nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] result = hub.switch() [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self.greenlet.switch() [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] result = function(*args, **kwargs) [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return func(*args, **kwargs) [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise e [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] nwinfo = self.network_api.allocate_for_instance( [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 619.547093] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] created_port_ids = self._update_ports_for_instance( [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] with excutils.save_and_reraise_exception(): [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.force_reraise() [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise self.value [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] updated_port = self._update_port( [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.547412] 
env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] _ensure_no_port_binding_failure(port) [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.547412] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise exception.PortBindingFailed(port_id=port['id']) [ 619.547704] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. [ 619.547704] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] [ 619.547704] env[61905]: INFO nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Terminating instance [ 619.549416] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.549416] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquired lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.549416] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.653598] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.662436] env[61905]: DEBUG nova.compute.manager [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Received event network-changed-66c29463-feaf-46d1-833a-37b3a678e28a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 619.662636] env[61905]: DEBUG nova.compute.manager [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Refreshing instance network info cache due to event network-changed-66c29463-feaf-46d1-833a-37b3a678e28a. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 619.662829] env[61905]: DEBUG oslo_concurrency.lockutils [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] Acquiring lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.993414] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980d94df-cac6-4548-bc51-57c238baffec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.002218] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be84048d-0057-4735-8116-90900cc8cd60 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.039970] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28627d0-bc26-49c4-91a1-7d49cb4019d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.043683] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquiring lock "d4e5eb34-6f16-4920-9f95-7ea8b080084b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 620.043969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "d4e5eb34-6f16-4920-9f95-7ea8b080084b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 620.054309] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b00e50c-a529-456b-8308-98395011df03 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.068794] env[61905]: DEBUG nova.compute.provider_tree [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.073777] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.160169] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Releasing lock "refresh_cache-79c34bad-f29b-4d5e-97d5-6bfd6be55b31" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.160169] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 620.160293] env[61905]: DEBUG nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.160415] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.180533] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.183044] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.296872] env[61905]: INFO nova.scheduler.client.report [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 56997e40-ec60-422d-b58c-8a628d37b1bc [ 620.571701] env[61905]: DEBUG nova.scheduler.client.report [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 620.685057] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Releasing lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.685649] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 620.685930] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.686361] env[61905]: DEBUG nova.network.neutron [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.687464] env[61905]: DEBUG oslo_concurrency.lockutils [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] Acquired lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.687641] env[61905]: DEBUG nova.network.neutron [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Refreshing network info cache for port 66c29463-feaf-46d1-833a-37b3a678e28a {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.690022] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1cbec8c-2bba-49b6-9737-5129e37b28a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.700167] 
env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c35f742-b205-4cbf-a819-3eef4a937d92 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.722712] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 426a5334-83fb-4c2a-85ac-42a8dddd775b could not be found. [ 620.722925] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.723123] env[61905]: INFO nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 620.723360] env[61905]: DEBUG oslo.service.loopingcall [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.723565] env[61905]: DEBUG nova.compute.manager [-] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.723659] env[61905]: DEBUG nova.network.neutron [-] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.738649] env[61905]: DEBUG nova.network.neutron [-] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.809802] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6ec3126-e682-49f6-967e-bc64c0cdd577 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "56997e40-ec60-422d-b58c-8a628d37b1bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 83.478s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.077579] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.083s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.077885] env[61905]: ERROR nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] Traceback (most recent call last): [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.driver.spawn(context, instance, image_meta, [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] vm_ref = self.build_virtual_machine(instance, [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.077885] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] for vif in network_info: [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self._sync_wrapper(fn, *args, **kwargs) [ 621.079196] 
env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.wait() [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self[:] = self._gt.wait() [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self._exit_event.wait() [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] result = hub.switch() [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 621.079196] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return self.greenlet.switch() [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] result = function(*args, **kwargs) [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] return func(*args, **kwargs) [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise e [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] nwinfo = self.network_api.allocate_for_instance( [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] created_port_ids = self._update_ports_for_instance( [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 621.079636] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] with excutils.save_and_reraise_exception(): [ 621.079636] env[61905]: ERROR 
nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] self.force_reraise() [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise self.value [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] updated_port = self._update_port( [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] _ensure_no_port_binding_failure(port) [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] raise exception.PortBindingFailed(port_id=port['id']) [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] nova.exception.PortBindingFailed: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. [ 621.080028] env[61905]: ERROR nova.compute.manager [instance: a9ce5207-c493-4924-8371-db65cf359523] [ 621.080407] env[61905]: DEBUG nova.compute.utils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. 
{{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 621.081367] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.215s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.083937] env[61905]: INFO nova.compute.claims [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.087029] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Build of instance a9ce5207-c493-4924-8371-db65cf359523 was re-scheduled: Binding failed for port 128481ba-c3df-415b-a586-ad5e47af072b, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.088903] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.088903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquiring lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.088903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Acquired lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.088903] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.193547] env[61905]: INFO nova.compute.manager [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] [instance: 79c34bad-f29b-4d5e-97d5-6bfd6be55b31] Took 1.03 seconds to deallocate network for instance. [ 621.221014] env[61905]: DEBUG nova.network.neutron [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.244099] env[61905]: DEBUG nova.network.neutron [-] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.313417] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.328579] env[61905]: DEBUG nova.network.neutron [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.611463] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.676035] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.747829] env[61905]: INFO nova.compute.manager [-] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Took 1.02 seconds to deallocate network for instance. 
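[editor's note] The PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']). A minimal sketch of that guard, reconstructed from the traceback alone; the binding:vif_type comparison is an assumption about how Neutron marks a failed binding, not something quoted from this log:

from nova import exception

# Assumption: Neutron flags a port whose binding failed by setting
# binding:vif_type to 'binding_failed'; Nova turns that marker into the
# PortBindingFailed seen above for port 128481ba-c3df-415b-a586-ad5e47af072b.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise exception.PortBindingFailed(port_id=port['id'])

Under that reading, "please check neutron logs for more information" is the operative hint: the binding failed on the Neutron side, and nova-compute only surfaces it while updating the port, after which the build is re-scheduled as logged at manager.py:2480.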
[ 621.750785] env[61905]: DEBUG nova.compute.claims [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.750965] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.779506] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquiring lock "090f2b46-e4f1-4b6b-b596-dd1937969007" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.779836] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "090f2b46-e4f1-4b6b-b596-dd1937969007" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.832302] env[61905]: DEBUG oslo_concurrency.lockutils [req-7b081169-59bd-4d79-9597-f4fe774f7005 req-6f570f21-b3d7-4db0-a6ed-0aa747ae166b service nova] Releasing lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.836974] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.895196] env[61905]: DEBUG nova.compute.manager [req-3e040934-ce8a-4231-8b01-290520bb12ce req-38d4e48a-52ec-46a3-b84b-d5a2e7118a28 service nova] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Received event network-vif-deleted-66c29463-feaf-46d1-833a-37b3a678e28a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.179437] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Releasing lock "refresh_cache-a9ce5207-c493-4924-8371-db65cf359523" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.180508] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 622.180508] env[61905]: DEBUG nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 622.180508] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 622.209843] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.228128] env[61905]: INFO nova.scheduler.client.report [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] Deleted allocations for instance 79c34bad-f29b-4d5e-97d5-6bfd6be55b31 [ 622.514724] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-897d1e54-eb9f-49c1-b6c7-98c5d0e82e6e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.522896] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b02677-7c08-4f94-b192-acc06cd1a5be {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.556729] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accbd224-1d78-4c74-bddc-a4907f3540b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.564444] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0cc434-9a50-4d18-9c15-1dabe8a3864f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.577966] env[61905]: DEBUG nova.compute.provider_tree [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.713347] env[61905]: DEBUG nova.network.neutron [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.739730] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71edaaad-ec2c-4e91-b7e6-b99183109536 tempest-ServerTagsTestJSON-1861260756 tempest-ServerTagsTestJSON-1861260756-project-member] 
Lock "79c34bad-f29b-4d5e-97d5-6bfd6be55b31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.555s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.081711] env[61905]: DEBUG nova.scheduler.client.report [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.216583] env[61905]: INFO nova.compute.manager [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] [instance: a9ce5207-c493-4924-8371-db65cf359523] Took 1.04 seconds to deallocate network for instance. [ 623.241841] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 623.589938] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.590506] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 623.594423] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.409s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.769654] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.987484] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "9222cafc-fcee-40b9-b6c3-f1cf677324ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.987793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "9222cafc-fcee-40b9-b6c3-f1cf677324ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.097335] env[61905]: DEBUG nova.compute.utils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 624.098015] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 624.098015] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 624.170137] env[61905]: DEBUG nova.policy [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 624.254912] env[61905]: INFO nova.scheduler.client.report [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Deleted allocations for instance a9ce5207-c493-4924-8371-db65cf359523 [ 624.601490] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 624.636622] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 2a8bcc04-5519-4890-839b-64dcf422526d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 624.636622] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e997db40-b3a6-4c06-8991-cdb96954c0ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 624.636622] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 426a5334-83fb-4c2a-85ac-42a8dddd775b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 624.636777] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance eb372895-68b6-41cb-8ae5-dbfd57387505 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 624.722514] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Successfully created port: 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.768713] env[61905]: DEBUG oslo_concurrency.lockutils [None req-203e1935-eddd-49c3-ac69-c15ce3b2344b tempest-DeleteServersAdminTestJSON-2084086551 tempest-DeleteServersAdminTestJSON-2084086551-project-member] Lock "a9ce5207-c493-4924-8371-db65cf359523" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 85.740s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.139861] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance b9199119-9d4e-4b04-8675-22f6680da8b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 625.276762] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 625.615265] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 625.645757] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 949307dd-f8c4-4a79-ad82-99d416d06332 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 625.661872] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 625.662423] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 625.662546] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.662737] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 625.662975] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.663257] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 625.663516] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 625.664266] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
625.664266] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 625.664652] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 625.664652] env[61905]: DEBUG nova.virt.hardware [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 625.665547] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856d6852-a848-4b8e-b92c-a8ad00a1bbd7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.677536] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd208d9e-3c01-4937-8fc1-9c9b0e6036ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.825511] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.155875] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e6a063b4-d4f8-46ae-89ae-2d66637896ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 626.179453] env[61905]: DEBUG nova.compute.manager [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Received event network-changed-32febe8b-1fa3-485d-a18b-2ea1db9bd4ef {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 626.179651] env[61905]: DEBUG nova.compute.manager [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Refreshing instance network info cache due to event network-changed-32febe8b-1fa3-485d-a18b-2ea1db9bd4ef. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 626.181707] env[61905]: DEBUG oslo_concurrency.lockutils [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] Acquiring lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.181707] env[61905]: DEBUG oslo_concurrency.lockutils [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] Acquired lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.181707] env[61905]: DEBUG nova.network.neutron [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Refreshing network info cache for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.637726] env[61905]: ERROR nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. [ 626.637726] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.637726] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.637726] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.637726] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.637726] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.637726] env[61905]: ERROR nova.compute.manager raise self.value [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.637726] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 626.637726] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.637726] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 626.638440] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.638440] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 626.638440] env[61905]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. [ 626.638440] env[61905]: ERROR nova.compute.manager [ 626.638440] env[61905]: Traceback (most recent call last): [ 626.638440] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 626.638440] env[61905]: listener.cb(fileno) [ 626.638440] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.638440] env[61905]: result = function(*args, **kwargs) [ 626.638440] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.638440] env[61905]: return func(*args, **kwargs) [ 626.638440] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.638440] env[61905]: raise e [ 626.638440] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.638440] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 626.638440] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.638440] env[61905]: created_port_ids = self._update_ports_for_instance( [ 626.638440] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.638440] env[61905]: with excutils.save_and_reraise_exception(): [ 626.638440] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.638440] env[61905]: self.force_reraise() [ 626.638440] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.638440] env[61905]: raise self.value [ 626.638440] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.638440] env[61905]: updated_port = self._update_port( [ 626.638440] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.638440] env[61905]: _ensure_no_port_binding_failure(port) [ 626.638440] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.638440] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 626.639272] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. [ 626.639272] env[61905]: Removing descriptor: 18 [ 626.639272] env[61905]: ERROR nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. 
[ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Traceback (most recent call last): [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] yield resources [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.driver.spawn(context, instance, image_meta, [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.639272] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] vm_ref = self.build_virtual_machine(instance, [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] for vif in network_info: [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self._sync_wrapper(fn, *args, **kwargs) [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.wait() [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self[:] = self._gt.wait() [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self._exit_event.wait() [ 626.639726] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.640108] env[61905]: ERROR 
nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] result = hub.switch() [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self.greenlet.switch() [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] result = function(*args, **kwargs) [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return func(*args, **kwargs) [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise e [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] nwinfo = self.network_api.allocate_for_instance( [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 626.640108] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] created_port_ids = self._update_ports_for_instance( [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] with excutils.save_and_reraise_exception(): [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.force_reraise() [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise self.value [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] updated_port = self._update_port( [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.640618] 
env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] _ensure_no_port_binding_failure(port) [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.640618] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise exception.PortBindingFailed(port_id=port['id']) [ 626.641836] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. [ 626.641836] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] [ 626.641836] env[61905]: INFO nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Terminating instance [ 626.641836] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.659111] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance b92a6db6-c51a-45c8-9792-d394027bcb7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 626.713330] env[61905]: DEBUG nova.network.neutron [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.864356] env[61905]: DEBUG nova.network.neutron [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.162434] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 362c9148-9f78-4700-9c6f-7fd0eaef4bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 627.368037] env[61905]: DEBUG oslo_concurrency.lockutils [req-daa1e103-833e-40d6-9b5b-0f784c6b3def req-00730a16-1cc3-4c8a-8ab8-0cc0fddc53f7 service nova] Releasing lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.368486] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.368682] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.665057] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 105aed8e-4268-4553-9564-1540cb5176dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 627.895175] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 627.902291] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquiring lock "1b57ca4b-a8b9-497f-bc81-71c31510093e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.902525] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "1b57ca4b-a8b9-497f-bc81-71c31510093e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.056313] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.168253] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 60f2eb1d-de4c-4318-98c3-eb2d411c120b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 628.218470] env[61905]: DEBUG nova.compute.manager [req-438be665-26ea-4e09-8a92-b895554fa074 req-be85bebe-c05c-4664-bcae-f4cfe97f1d33 service nova] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Received event network-vif-deleted-32febe8b-1fa3-485d-a18b-2ea1db9bd4ef {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 628.561043] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.561292] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Start destroying the instance on the hypervisor.
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 628.561552] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 628.561926] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e71146b-e472-48be-8c5d-0f4e14d3634b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.572132] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869c3cd8-2807-4185-b6be-80e15c4ede15 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.593203] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eb372895-68b6-41cb-8ae5-dbfd57387505 could not be found. [ 628.593503] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 628.593586] env[61905]: INFO nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Took 0.03 seconds to destroy the instance on the hypervisor. [ 628.593818] env[61905]: DEBUG oslo.service.loopingcall [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.594062] env[61905]: DEBUG nova.compute.manager [-] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 628.594169] env[61905]: DEBUG nova.network.neutron [-] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 628.608845] env[61905]: DEBUG nova.network.neutron [-] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.671292] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ded96da7-74a4-4364-8424-22000411f5fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 629.111468] env[61905]: DEBUG nova.network.neutron [-] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.173891] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 629.614200] env[61905]: INFO nova.compute.manager [-] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Took 1.02 seconds to deallocate network for instance. [ 629.617256] env[61905]: DEBUG nova.compute.claims [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 629.617431] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.679927] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 2a778ae5-37be-4479-b7ff-4468d0433c86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 629.867211] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquiring lock "1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.867436] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.183373] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 03f9b48c-4bd1-4018-b34f-267e1575c753 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start.
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 630.687372] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 84428003-72b1-467a-baf5-06ac37205622 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 631.191849] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 7ae6338f-289f-415a-b261-3be2f9948572 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 631.695867] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 7e393163-cd68-4de2-8051-7ec10415e508 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.198992] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c7e66b30-f72d-4afd-aded-4a92dd19b388 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 632.701957] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0a1e2a21-a43d-4363-9f1f-683e35d199aa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 633.205561] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 633.708996] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ef6e5c2c-1778-4079-ae35-55f9264a060d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.212866] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 8d2cb485-32da-4fe7-8462-d98c071a6310 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 634.717159] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 6783654c-4f87-4353-b9ba-1299158eba3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 635.220721] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 24f832e6-9704-4105-a17f-c5c77fa52d74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 635.723727] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 5bdd7f80-e321-475f-8132-7047a7f24c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.226978] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d4e5eb34-6f16-4920-9f95-7ea8b080084b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 636.730344] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 090f2b46-e4f1-4b6b-b596-dd1937969007 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.233933] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 637.234216] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 637.234367] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 637.646567] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4809688c-8bcc-40b6-9a70-cb4438c190f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.653797] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c634fe18-f323-41d7-8792-6c91fbff0549 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.684023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f70a12-acbf-4fa9-afe9-80adf8a6dcf1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.693213] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e8357b-1ace-4adb-8624-54eac3962ed6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.709093] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.213082] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 638.718338] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 638.718586] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.124s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.718861] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.856s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.720329] env[61905]: INFO nova.compute.claims [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.067427] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e82342a-f117-4cd5-a5d5-5531f8e6597d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.074210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5db356-e8ac-400c-bd9b-c452738e8847 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.104123] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea51f84-83b2-42be-9f9a-8def87084326 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.111301] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6e00d9-b820-48b8-9576-99045a58a166 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.123907] env[61905]: DEBUG nova.compute.provider_tree [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.626933] env[61905]: DEBUG nova.scheduler.client.report [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 641.132683] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.133283] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 
tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 641.135950] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.045s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.137449] env[61905]: INFO nova.compute.claims [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.643668] env[61905]: DEBUG nova.compute.utils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.647665] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 642.148904] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 642.477017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c7e5f3-41d7-4afd-ac1e-9f79ab937a62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.484808] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafb5095-9202-498e-88dc-db8f7c9ed4ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.515666] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fd71fb-2edf-47ea-a9dd-0d1f4376fd0b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.522627] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1ac693-462a-4d16-95c7-9e9b0d1a8f22 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.536284] env[61905]: DEBUG nova.compute.provider_tree [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.039727] env[61905]: DEBUG nova.scheduler.client.report [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 643.163321] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 643.193688] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.193952] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.194136] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.194343] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.194479] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.194619] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.194818] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.194970] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.197862] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956
tempest-ServerShowV247Test-741607956-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.197862] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.197862] env[61905]: DEBUG nova.virt.hardware [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.198300] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfc0baa-bff3-4bef-adc7-539cb27f8218 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.207371] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149ca75e-d582-408f-8845-9c07f89ba416 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.220877] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.226204] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating folder: Project (8e7c794d396d44bab946f1d202906296). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.226694] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13c3d052-bab5-4cf2-ae0e-12a822fab5ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.238680] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created folder: Project (8e7c794d396d44bab946f1d202906296) in parent group-v289968. [ 643.238905] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating folder: Instances. Parent ref: group-v289985. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.239178] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a2f9bdf-ede9-47e1-8eed-3a659b18697a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.250505] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created folder: Instances in parent group-v289985. 
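The Folder.CreateVM_Task / wait_for_task exchange in the records that follow is oslo.vmware's standard task-polling pattern: every vSphere *_Task method returns a Task managed-object reference immediately, and wait_for_task() drives the "_poll_task ... progress is N%" loop until the task reaches a terminal state. A minimal Python sketch of that pattern, not nova's actual call path: the endpoint and credentials are placeholders, and folder_ref, config_spec, and respool_ref stand in for objects that nova.virt.vmwareapi.vm_util assembles elsewhere.

    from oslo_vmware import api

    # Placeholder endpoint and credentials; nova reads the real values
    # from the [vmware] section of nova.conf.
    session = api.VMwareAPISession('vc.example.invalid', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Assumed inputs: folder_ref is the target VM folder moref, config_spec a
    # VirtualMachineConfigSpec, respool_ref a resource pool moref.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=respool_ref)

    # wait_for_task is what produces the "Task: {'id': task-...} progress is
    # N%" lines: it polls the Task object every task_poll_interval seconds and
    # returns the finished task info, raising if the task ends in an error state.
    task_info = session.wait_for_task(task_ref)
    vm_ref = task_info.result  # moref of the newly created VirtualMachine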
[ 643.250748] env[61905]: DEBUG oslo.service.loopingcall [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.250968] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.251307] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47012e20-589e-4801-aacc-98c469cdf653 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.268044] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.268044] env[61905]: value = "task-1362313" [ 643.268044] env[61905]: _type = "Task" [ 643.268044] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.277063] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362313, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.544567] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.545099] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 643.547848] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.405s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.548046] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.549934] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.415s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.551602] env[61905]: INFO nova.compute.claims [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.571742] env[61905]: INFO nova.scheduler.client.report [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Deleted allocations for instance 2a8bcc04-5519-4890-839b-64dcf422526d [ 643.778332] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362313, 'name': CreateVM_Task, 'duration_secs': 0.247325} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.778497] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.778915] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.780027] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.780027] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 643.780027] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15bb4ebe-260d-46af-b588-0418e1470a5d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.783927] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 643.783927] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0d4e2-9ca1-986d-54e5-0ba28836b3ee" [ 643.783927] env[61905]: _type = "Task" [ 643.783927] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.791384] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0d4e2-9ca1-986d-54e5-0ba28836b3ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.056051] env[61905]: DEBUG nova.compute.utils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 644.058782] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 644.058947] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 644.082852] env[61905]: DEBUG oslo_concurrency.lockutils [None req-077cd69b-4e5a-4db1-8eb1-2d1d33500735 tempest-ServerDiagnosticsV248Test-884006041 tempest-ServerDiagnosticsV248Test-884006041-project-member] Lock "2a8bcc04-5519-4890-839b-64dcf422526d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 39.997s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.119190] env[61905]: DEBUG nova.policy [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4802b9aa6514255b8cf79a653c0e1d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92c117e0ee954b7e9ba5be4304a107ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 644.296937] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0d4e2-9ca1-986d-54e5-0ba28836b3ee, 'name': SearchDatastore_Task, 'duration_secs': 0.008654} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.297315] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.298334] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.298334] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.298334] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.298334] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.298500] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6c25034-76b2-4073-a290-86063cb9e915 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.306112] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.306112] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.306831] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b761925-8e50-42f8-8874-da24d5f1273c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.311893] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 644.311893] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ccb866-dddd-b028-7f24-cc4e75cf4126" [ 644.311893] env[61905]: _type = "Task" [ 644.311893] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.323419] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ccb866-dddd-b028-7f24-cc4e75cf4126, 'name': SearchDatastore_Task, 'duration_secs': 0.008728} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.323999] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f09827e7-6107-4c97-88bc-23037d823911 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.328796] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 644.328796] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f19ad3-97bd-a2e2-c263-1b85cfd63441" [ 644.328796] env[61905]: _type = "Task" [ 644.328796] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.336148] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f19ad3-97bd-a2e2-c263-1b85cfd63441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.562649] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 644.724326] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Successfully created port: 61353403-af3f-4851-9acf-de7a0ca84efa {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 644.845133] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f19ad3-97bd-a2e2-c263-1b85cfd63441, 'name': SearchDatastore_Task, 'duration_secs': 0.008542} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 644.848919] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 644.848919] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] b9199119-9d4e-4b04-8675-22f6680da8b1/b9199119-9d4e-4b04-8675-22f6680da8b1.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 644.851476] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fe38696-bb3a-43c7-a670-a3d5c3c59eaa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.857701] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 644.857701] env[61905]: value = "task-1362314"
[ 644.857701] env[61905]: _type = "Task"
[ 644.857701] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 644.866013] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 645.029277] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd18b33d-5514-42ce-9056-809a76781f29 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.036177] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dff988f-7ddc-4abb-932d-1a964e53fb8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.068878] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad030c8f-da47-4208-a051-70b8f7530991 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.080188] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b7fda6-2ef7-4bc7-8b14-208bb94f29f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.095246] env[61905]: DEBUG nova.compute.provider_tree [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 645.368785] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362314, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 645.577290] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 645.598958] env[61905]: DEBUG nova.scheduler.client.report [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 645.611509] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 645.612359] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 645.612359] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 645.612359] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 645.612359] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 645.612572] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 645.612625] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 645.612808] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 645.612937] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 645.613441] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 645.613673] env[61905]: DEBUG nova.virt.hardware [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 645.614685] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39974434-e22b-454b-8410-71ae31355d54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.623832] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b6b7ea-18ab-42c7-a7df-3669cfd7c00e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.793051] env[61905]: DEBUG nova.compute.manager [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Received event network-changed-61353403-af3f-4851-9acf-de7a0ca84efa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 645.793051] env[61905]: DEBUG nova.compute.manager [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Refreshing instance network info cache due to event network-changed-61353403-af3f-4851-9acf-de7a0ca84efa. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 645.793051] env[61905]: DEBUG oslo_concurrency.lockutils [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] Acquiring lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 645.793051] env[61905]: DEBUG oslo_concurrency.lockutils [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] Acquired lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 645.793051] env[61905]: DEBUG nova.network.neutron [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Refreshing network info cache for port 61353403-af3f-4851-9acf-de7a0ca84efa {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 645.871258] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526882} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 645.871484] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] b9199119-9d4e-4b04-8675-22f6680da8b1/b9199119-9d4e-4b04-8675-22f6680da8b1.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 645.871704] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 645.871950] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15a27709-269d-4b95-9502-d90a190fa527 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 645.878164] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 645.878164] env[61905]: value = "task-1362315"
[ 645.878164] env[61905]: _type = "Task"
[ 645.878164] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 645.886378] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 646.042080] env[61905]: ERROR nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 646.042080] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 646.042080] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 646.042080] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 646.042080] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 646.042080] env[61905]: ERROR nova.compute.manager self.force_reraise()
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 646.042080] env[61905]: ERROR nova.compute.manager raise self.value
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 646.042080] env[61905]: ERROR nova.compute.manager updated_port = self._update_port(
[ 646.042080] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 646.042080] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 646.042512] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 646.042512] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 646.042512] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 646.042512] env[61905]: ERROR nova.compute.manager
[ 646.042512] env[61905]: Traceback (most recent call last):
[ 646.042512] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 646.042512] env[61905]: listener.cb(fileno)
[ 646.042512] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 646.042512] env[61905]: result = function(*args, **kwargs)
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 646.042512] env[61905]: return func(*args, **kwargs)
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 646.042512] env[61905]: raise e
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 646.042512] env[61905]: nwinfo = self.network_api.allocate_for_instance(
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 646.042512] env[61905]: created_port_ids = self._update_ports_for_instance(
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 646.042512] env[61905]: with excutils.save_and_reraise_exception():
[ 646.042512] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 646.042512] env[61905]: self.force_reraise()
[ 646.042512] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 646.042512] env[61905]: raise self.value
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 646.042512] env[61905]: updated_port = self._update_port(
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 646.042512] env[61905]: _ensure_no_port_binding_failure(port)
[ 646.042512] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 646.042512] env[61905]: raise exception.PortBindingFailed(port_id=port['id'])
[ 646.043260] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 646.043260] env[61905]: Removing descriptor: 18
[ 646.043260] env[61905]: ERROR nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Traceback (most recent call last):
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] yield resources
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] self.driver.spawn(context, instance, image_meta,
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 646.043260] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] vm_ref = self.build_virtual_machine(instance,
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] vif_infos = vmwarevif.get_vif_info(self._session,
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] for vif in network_info:
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] return self._sync_wrapper(fn, *args, **kwargs)
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] self.wait()
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] self[:] = self._gt.wait()
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] return self._exit_event.wait()
[ 646.043651] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] result = hub.switch()
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] return self.greenlet.switch()
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] result = function(*args, **kwargs)
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] return func(*args, **kwargs)
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] raise e
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] nwinfo = self.network_api.allocate_for_instance(
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 646.044129] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] created_port_ids = self._update_ports_for_instance(
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] with excutils.save_and_reraise_exception():
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] self.force_reraise()
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] raise self.value
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] updated_port = self._update_port(
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] _ensure_no_port_binding_failure(port)
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 646.044485] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] raise exception.PortBindingFailed(port_id=port['id'])
[ 646.044806] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 646.044806] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]
[ 646.044806] env[61905]: INFO nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Terminating instance
[ 646.046079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquiring lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 646.103888] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 646.104463] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 646.107049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.673s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 646.108517] env[61905]: INFO nova.compute.claims [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 646.309023] env[61905]: DEBUG nova.network.neutron [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 646.382932] env[61905]: DEBUG nova.network.neutron [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 646.389457] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068465} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 646.389705] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 646.390457] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ecbf97-8c7e-4e91-84d4-596b8f409aab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 646.410469] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] b9199119-9d4e-4b04-8675-22f6680da8b1/b9199119-9d4e-4b04-8675-22f6680da8b1.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 646.411043] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-690cc92d-f562-4bcb-9056-53bc08495367 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 646.433119] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 646.433119] env[61905]: value = "task-1362316"
[ 646.433119] env[61905]: _type = "Task"
[ 646.433119] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 646.442028] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 646.616051] env[61905]: DEBUG nova.compute.utils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 646.617099] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}}
[ 646.886895] env[61905]: DEBUG oslo_concurrency.lockutils [req-cfd9b393-7ab6-4310-b0ca-6938374b4c89 req-69d0d6ff-ab3f-4cf1-9734-8d58a5969004 service nova] Releasing lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 646.886895] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquired lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 646.886895] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 646.944211] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362316, 'name': ReconfigVM_Task, 'duration_secs': 0.278387} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 646.944592] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Reconfigured VM instance instance-00000019 to attach disk [datastore2] b9199119-9d4e-4b04-8675-22f6680da8b1/b9199119-9d4e-4b04-8675-22f6680da8b1.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 646.945487] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-650ab9e6-061d-4e88-a63a-84ff9a21e585 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 646.952424] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 646.952424] env[61905]: value = "task-1362317"
[ 646.952424] env[61905]: _type = "Task"
[ 646.952424] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 646.961471] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362317, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 647.121917] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 647.409286] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 647.469196] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362317, 'name': Rename_Task, 'duration_secs': 0.130405} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 647.469487] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 647.469733] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b980d19b-402f-462b-abc9-3f76ef302aa4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.475969] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 647.475969] env[61905]: value = "task-1362318"
[ 647.475969] env[61905]: _type = "Task"
[ 647.475969] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 647.487891] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 647.517267] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 647.571205] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff16bf7d-a377-4692-81bc-121e78a89a67 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.581193] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9c73bc-9435-4687-8419-589c35066e27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.612604] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7fddf0-39f6-4e50-a38d-ac641650eb96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.620276] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d607f38-635f-4a0b-8cf9-768c05a23609 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.637312] env[61905]: DEBUG nova.compute.provider_tree [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 647.921831] env[61905]: DEBUG nova.compute.manager [req-616341da-d549-43c7-9323-0552e9ee2c01 req-8d96e331-aa78-47c0-a46b-effef7697fbb service nova] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Received event network-vif-deleted-61353403-af3f-4851-9acf-de7a0ca84efa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 647.987162] env[61905]: DEBUG oslo_vmware.api [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362318, 'name': PowerOnVM_Task, 'duration_secs': 0.454793} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 647.987592] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 647.987661] env[61905]: INFO nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Took 4.82 seconds to spawn the instance on the hypervisor.
[ 647.987840] env[61905]: DEBUG nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 647.991343] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e6b945-601e-4ea1-8c1b-659835a7ea62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.020222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Releasing lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 648.020622] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 648.020809] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 648.021124] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b3c7dec-f4f8-4327-82cb-2e0319262169 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.031679] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b673342-847a-4bc2-988b-c83aed56591b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.057102] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 949307dd-f8c4-4a79-ad82-99d416d06332 could not be found.
[ 648.057102] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 648.057102] env[61905]: INFO nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 648.057102] env[61905]: DEBUG oslo.service.loopingcall [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 648.057102] env[61905]: DEBUG nova.compute.manager [-] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 648.057102] env[61905]: DEBUG nova.network.neutron [-] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 648.078783] env[61905]: DEBUG nova.network.neutron [-] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 648.142073] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 648.143958] env[61905]: DEBUG nova.scheduler.client.report [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 648.181222] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 648.181472] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 648.181619] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 648.183436] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 648.183436] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 648.183630] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 648.183750] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 648.183918] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 648.184108] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 648.184887] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 648.184887] env[61905]: DEBUG nova.virt.hardware [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 648.186028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b019bcc1-4752-4871-8100-ee2092d9424c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.194789] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943d1000-5f1c-4fde-ad1a-b48dbb887fc6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.211075] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 648.220304] env[61905]: DEBUG oslo.service.loopingcall [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 648.220581] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 648.221428] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35355975-51b4-40f8-83fd-edc76297e8d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.239952] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 648.239952] env[61905]: value = "task-1362319"
[ 648.239952] env[61905]: _type = "Task"
[ 648.239952] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 648.248595] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362319, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 648.508711] env[61905]: INFO nova.compute.manager [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Took 43.67 seconds to build instance.
[ 648.581919] env[61905]: DEBUG nova.network.neutron [-] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 648.653038] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 648.653038] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 648.654344] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 32.509s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 648.750658] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362319, 'name': CreateVM_Task, 'duration_secs': 0.290634} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 648.750889] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 648.751781] env[61905]: DEBUG oslo_vmware.service [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdea584-8d8f-4caf-ab4e-292d3adb3f46 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.758543] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 648.758717] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 648.759079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 648.759315] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc50ac46-d9b3-45b7-92dd-1a614e2e45f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.763962] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 648.763962] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d56810-95db-6517-9821-c61495ee69c2"
[ 648.763962] env[61905]: _type = "Task"
[ 648.763962] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 648.772551] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d56810-95db-6517-9821-c61495ee69c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 649.013514] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1dca0d0f-9cdd-47de-9d57-cb6f26ff7ea8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 102.401s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 649.085711] env[61905]: INFO nova.compute.manager [-] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Took 1.03 seconds to deallocate network for instance.
[ 649.087939] env[61905]: DEBUG nova.compute.claims [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 649.088219] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 649.160091] env[61905]: DEBUG nova.compute.utils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 649.164484] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 649.164652] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 649.225774] env[61905]: DEBUG nova.policy [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bacb2f35d1e43e08adf2ca42149e6a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8e003b9359346418cead91f86082c4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 649.275938] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 649.276743] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 649.276743] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 649.276743] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 649.276901] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 649.277032] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62392de4-13b8-48ca-8793-54feb10bcc1e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 649.293971] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 649.294188] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 649.295053] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6247bf-93ac-4b5a-a039-e5a07e6bac3e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 649.307262] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beb53c35-91cb-4340-9fb4-edf629ce8099 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 649.312627] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 649.312627] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52128815-8415-1d92-ad62-e2c3c389aedc"
[ 649.312627] env[61905]: _type = "Task"
[ 649.312627] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 649.320630] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52128815-8415-1d92-ad62-e2c3c389aedc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 649.518935] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Starting instance...
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 649.558018] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Successfully created port: 7b181c94-3a30-420f-bbbd-8cbb5655b059 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.640903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquiring lock "d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.640903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.646018] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171c2939-f325-4a62-9a9f-7323e7084f7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.655066] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81179ff6-5564-4d9c-97ee-1bf02b9bf2da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.687289] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 649.691384] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646b06a1-585f-41e8-abe4-055c5f75e46b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.699312] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9063efd9-426d-4cbc-8dc2-0f06cd408755 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.714839] env[61905]: DEBUG nova.compute.provider_tree [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.824484] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Preparing fetch location {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 649.824845] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating directory with path [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.825207] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3d3cea9-eb2a-459b-a3f5-46823afbe8c1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.849160] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created directory with path [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.849391] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Fetch image to [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 649.849565] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Downloading image file data 4d166298-c700-4bc6-8f8f-67684a277053 to [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk on the data store datastore1 {{(pid=61905) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 649.850734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-30a93de1-be01-4e41-a3d0-0f636c89b0ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.859075] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495bc425-e4de-4ffd-8aed-b381dd8c5786 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.869481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a95d140-9ab0-4918-b472-e09fb10992d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.904661] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Successfully created port: 126845a2-2643-4ea6-aecc-d1e91bcc2c6d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.906579] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b63c2e-011e-40b2-a026-5e117b75923f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.913957] env[61905]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-84ef92b7-af1a-47e0-af9d-bd5a91f40cf2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.935670] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Downloading image file data 4d166298-c700-4bc6-8f8f-67684a277053 to the data store datastore1 {{(pid=61905) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 649.995794] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61905) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 650.080320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.220539] env[61905]: DEBUG nova.scheduler.client.report [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 650.257100] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Successfully created port: a13007b7-9a21-4d97-baf1-975cfd1804c8 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.700638] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 650.707179] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Completed reading data from the image iterator. {{(pid=61905) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 650.707530] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 650.725038] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.071s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.725672] env[61905]: ERROR nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Traceback (most recent call last): [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.driver.spawn(context, instance, image_meta, [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] vm_ref = self.build_virtual_machine(instance, [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.725672] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] for vif in network_info: [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self._sync_wrapper(fn, *args, **kwargs) [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.wait() [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.726020] env[61905]: 
ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self[:] = self._gt.wait() [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self._exit_event.wait() [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] result = hub.switch() [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 650.726020] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return self.greenlet.switch() [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] result = function(*args, **kwargs) [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] return func(*args, **kwargs) [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise e [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] nwinfo = self.network_api.allocate_for_instance( [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] created_port_ids = self._update_ports_for_instance( [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] with excutils.save_and_reraise_exception(): [ 650.726461] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] self.force_reraise() [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise self.value [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] updated_port = self._update_port( [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] _ensure_no_port_binding_failure(port) [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] raise exception.PortBindingFailed(port_id=port['id']) [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] nova.exception.PortBindingFailed: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. [ 650.726761] env[61905]: ERROR nova.compute.manager [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] [ 650.727029] env[61905]: DEBUG nova.compute.utils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. 
{{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 650.729908] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.729908] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.729908] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.730174] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.730262] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.730412] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.730647] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.730807] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.730969] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 
tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.731141] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.731312] env[61905]: DEBUG nova.virt.hardware [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.731652] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.981s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.734939] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c2923b-28c3-464d-b57d-3245e0b99624 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.738307] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Build of instance e997db40-b3a6-4c06-8991-cdb96954c0ca was re-scheduled: Binding failed for port 97bce231-0da1-492b-8afa-feda650f65cb, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 650.738760] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 650.738977] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquiring lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.739136] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Acquired lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.739343] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.747242] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099de8b5-181e-4159-97c9-b262828751e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.855974] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Downloaded image file data 4d166298-c700-4bc6-8f8f-67684a277053 to vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk on the data store datastore1 {{(pid=61905) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 650.858315] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Caching image {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 650.858587] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copying Virtual Disk [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk to [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.858884] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48de4c3a-e512-4bba-b9ef-335b2bab2e70 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.866724] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 650.866724] env[61905]: value = "task-1362320" [ 650.866724] env[61905]: _type = "Task" [ 650.866724] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.875563] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362320, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.166517] env[61905]: DEBUG nova.compute.manager [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Received event network-changed-7b181c94-3a30-420f-bbbd-8cbb5655b059 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 651.166706] env[61905]: DEBUG nova.compute.manager [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Refreshing instance network info cache due to event network-changed-7b181c94-3a30-420f-bbbd-8cbb5655b059. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 651.166911] env[61905]: DEBUG oslo_concurrency.lockutils [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] Acquiring lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.167069] env[61905]: DEBUG oslo_concurrency.lockutils [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] Acquired lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.167289] env[61905]: DEBUG nova.network.neutron [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Refreshing network info cache for port 7b181c94-3a30-420f-bbbd-8cbb5655b059 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.269130] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.376060] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362320, 'name': CopyVirtualDisk_Task} progress is 27%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.398739] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.413152] env[61905]: ERROR nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. [ 651.413152] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 651.413152] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 651.413152] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 651.413152] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 651.413152] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 651.413152] env[61905]: ERROR nova.compute.manager raise self.value [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 651.413152] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 651.413152] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 651.413152] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 651.413630] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 651.413630] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 651.413630] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. 
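The traceback above (repeated below in its raw, non-instance-tagged form) bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294, which raises PortBindingFailed as soon as Neutron hands back a port whose binding could not be completed. A minimal self-contained sketch of that check, assuming Neutron marks such ports with the sentinel vif_type 'binding_failed' (the sentinel itself is not shown in this log):

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs '
            'for more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron reports a port whose binding could not be completed by
    # setting binding:vif_type to a failure sentinel (assumed here to
    # be 'binding_failed'); treat that as fatal so the build can be
    # aborted and rescheduled.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': '7b181c94-3a30-420f-bbbd-8cbb5655b059',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # matches the message logged above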
[ 651.413630] env[61905]: ERROR nova.compute.manager [ 651.413630] env[61905]: Traceback (most recent call last): [ 651.413630] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 651.413630] env[61905]: listener.cb(fileno) [ 651.413630] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 651.413630] env[61905]: result = function(*args, **kwargs) [ 651.413630] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 651.413630] env[61905]: return func(*args, **kwargs) [ 651.413630] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 651.413630] env[61905]: raise e [ 651.413630] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 651.413630] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 651.413630] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 651.413630] env[61905]: created_port_ids = self._update_ports_for_instance( [ 651.413630] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 651.413630] env[61905]: with excutils.save_and_reraise_exception(): [ 651.413630] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 651.413630] env[61905]: self.force_reraise() [ 651.413630] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 651.413630] env[61905]: raise self.value [ 651.413630] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 651.413630] env[61905]: updated_port = self._update_port( [ 651.413630] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 651.413630] env[61905]: _ensure_no_port_binding_failure(port) [ 651.413630] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 651.413630] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 651.414465] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. [ 651.414465] env[61905]: Removing descriptor: 18 [ 651.414465] env[61905]: ERROR nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. 
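Each of the frame chains above passes through oslo_utils.excutils.save_and_reraise_exception(): the context manager lets _update_ports_for_instance roll back the ports it has already created and then re-raises the original PortBindingFailed (the per-instance traceback for the spawn failure continues below). A short sketch of that usage pattern; bind_port and the rollback body are illustrative stand-ins, not Nova's actual code:

from oslo_utils import excutils

def bind_port(port_id):
    # Hypothetical stand-in for the Neutron port update; fails the way
    # a port with no viable binding host does in this log.
    raise RuntimeError('Binding failed for port %s' % port_id)

def update_ports(port_ids):
    created = []
    for port_id in port_ids:
        try:
            created.append(bind_port(port_id))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs first; the original exception is then
                # re-raised automatically when the with-block exits.
                print('rolling back already-created ports: %s' % created)

try:
    update_ports(['7b181c94-3a30-420f-bbbd-8cbb5655b059'])
except RuntimeError as exc:
    print(exc)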
[ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Traceback (most recent call last): [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] yield resources [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] self.driver.spawn(context, instance, image_meta, [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 651.414465] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] vm_ref = self.build_virtual_machine(instance, [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] vif_infos = vmwarevif.get_vif_info(self._session, [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] for vif in network_info: [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] return self._sync_wrapper(fn, *args, **kwargs) [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] self.wait() [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] self[:] = self._gt.wait() [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] return self._exit_event.wait() [ 651.414791] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 651.415174] env[61905]: ERROR 
nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] result = hub.switch() [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] return self.greenlet.switch() [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] result = function(*args, **kwargs) [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] return func(*args, **kwargs) [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] raise e [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] nwinfo = self.network_api.allocate_for_instance( [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 651.415174] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] created_port_ids = self._update_ports_for_instance( [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] with excutils.save_and_reraise_exception(): [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] self.force_reraise() [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] raise self.value [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] updated_port = self._update_port( [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 651.415589] 
env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] _ensure_no_port_binding_failure(port) [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 651.415589] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] raise exception.PortBindingFailed(port_id=port['id']) [ 651.415933] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. [ 651.415933] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] [ 651.415933] env[61905]: INFO nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Terminating instance [ 651.417653] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.641658] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8bd809-0156-41a8-8806-34a7ec4f7755 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.648890] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb16ea46-ec4e-49a7-983e-c7e9b368d7bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.679359] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c3cc06-4899-4abd-9f17-acb3fc9b9709 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.686746] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e463f50-c538-42bd-b822-de4eca8ad8c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.699954] env[61905]: DEBUG nova.compute.provider_tree [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.712331] env[61905]: DEBUG nova.network.neutron [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.825448] env[61905]: DEBUG nova.network.neutron [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.877491] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362320, 'name': CopyVirtualDisk_Task} progress is 70%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.900250] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Releasing lock "refresh_cache-e997db40-b3a6-4c06-8991-cdb96954c0ca" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.900496] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 651.900682] env[61905]: DEBUG nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 651.900844] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.916587] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.202358] env[61905]: DEBUG nova.scheduler.client.report [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 652.329675] env[61905]: DEBUG oslo_concurrency.lockutils [req-a18a2f47-3f5b-43c5-a4f0-bc033795308f req-55bc0ee2-bf17-4c42-b98c-39f9797b2da8 service nova] Releasing lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.329675] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.329675] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.377957] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362320, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.419148] env[61905]: DEBUG nova.network.neutron [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.707923] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.708622] env[61905]: ERROR nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. 
[ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Traceback (most recent call last): [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.driver.spawn(context, instance, image_meta, [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] vm_ref = self.build_virtual_machine(instance, [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] vif_infos = vmwarevif.get_vif_info(self._session, [ 652.708622] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] for vif in network_info: [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self._sync_wrapper(fn, *args, **kwargs) [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.wait() [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self[:] = self._gt.wait() [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self._exit_event.wait() [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] result = hub.switch() [ 652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
652.708975] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return self.greenlet.switch() [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] result = function(*args, **kwargs) [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] return func(*args, **kwargs) [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise e [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] nwinfo = self.network_api.allocate_for_instance( [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] created_port_ids = self._update_ports_for_instance( [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] with excutils.save_and_reraise_exception(): [ 652.709355] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] self.force_reraise() [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise self.value [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] updated_port = self._update_port( [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] _ensure_no_port_binding_failure(port) [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] raise exception.PortBindingFailed(port_id=port['id']) [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] nova.exception.PortBindingFailed: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. [ 652.709718] env[61905]: ERROR nova.compute.manager [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] [ 652.710035] env[61905]: DEBUG nova.compute.utils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 652.710948] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.874s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.712546] env[61905]: INFO nova.compute.claims [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.715316] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Build of instance 426a5334-83fb-4c2a-85ac-42a8dddd775b was re-scheduled: Binding failed for port 66c29463-feaf-46d1-833a-37b3a678e28a, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 652.715751] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 652.716019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquiring lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.716144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Acquired lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.716299] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.873411] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.881374] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362320, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.592297} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.881640] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copied Virtual Disk [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk to [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 652.881812] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleting the datastore file [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053/tmp-sparse.vmdk {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.882084] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5dab94bc-7283-4bb0-b518-f410788cc865 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.888027] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 652.888027] env[61905]: value = "task-1362321" [ 652.888027] env[61905]: _type = "Task" [ 652.888027] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.895544] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.922840] env[61905]: INFO nova.compute.manager [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] [instance: e997db40-b3a6-4c06-8991-cdb96954c0ca] Took 1.02 seconds to deallocate network for instance. [ 652.978801] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.193825] env[61905]: DEBUG nova.compute.manager [req-64618022-b25b-47b6-86dc-95557b1ffde3 req-bd301b7e-f18e-4819-93e2-2e6941afb167 service nova] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Received event network-vif-deleted-7b181c94-3a30-420f-bbbd-8cbb5655b059 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 653.235167] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.313914] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.399254] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.028427} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.399539] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 653.399753] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Moving file from [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f/4d166298-c700-4bc6-8f8f-67684a277053 to [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053. {{(pid=61905) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 653.400027] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-3b7ebe2f-f53a-42ba-bc59-3f0a0ae85de5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.406700] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 653.406700] env[61905]: value = "task-1362322" [ 653.406700] env[61905]: _type = "Task" [ 653.406700] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.415413] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362322, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.481259] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Releasing lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.481710] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 653.481901] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.482212] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8936978a-4895-45ae-8e0d-c8baa6610f84 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.490904] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feed9288-608e-454c-8bb9-7fd29a5eb6c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.514664] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b92a6db6-c51a-45c8-9792-d394027bcb7c could not be found. [ 653.514931] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 653.515088] env[61905]: INFO nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 653.515570] env[61905]: DEBUG oslo.service.loopingcall [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.515754] env[61905]: DEBUG nova.compute.manager [-] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 653.515841] env[61905]: DEBUG nova.network.neutron [-] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.559163] env[61905]: DEBUG nova.network.neutron [-] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.816608] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Releasing lock "refresh_cache-426a5334-83fb-4c2a-85ac-42a8dddd775b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.816878] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 653.817352] env[61905]: DEBUG nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 653.818143] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.844991] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 653.917181] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362322, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028029} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.917728] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] File moved {{(pid=61905) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 653.917728] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Cleaning up location [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 653.917932] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleting the datastore file [datastore1] vmware_temp/acf38724-d695-4dfe-9718-43b9b4a1759f {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 653.920943] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e5f6b36-78cd-4378-a5ff-1ec713f3f3ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.924965] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 653.924965] env[61905]: value = "task-1362323" [ 653.924965] env[61905]: _type = "Task" [ 653.924965] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.947203] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362323, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.956162] env[61905]: INFO nova.scheduler.client.report [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Deleted allocations for instance e997db40-b3a6-4c06-8991-cdb96954c0ca [ 654.154764] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3bdc70-0450-4ca2-8e40-f759d44289fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.163699] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feebef2-971f-4ce3-8dfe-cbb9f12b68a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.191517] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a330435-4422-4b5e-8208-3bece87da255 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.200695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4208ec4c-5bd2-4dc8-aad1-9d7bf3c66fad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.215145] env[61905]: DEBUG nova.compute.provider_tree [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.357420] env[61905]: DEBUG nova.network.neutron [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.435366] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362323, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026311} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.435692] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.436492] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d4ff602-f463-4e8c-87c1-2ffb21b65886 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.441377] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 654.441377] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527778df-8e71-c34e-fa75-38fa78702e79" [ 654.441377] env[61905]: _type = "Task" [ 654.441377] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.448510] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527778df-8e71-c34e-fa75-38fa78702e79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.466026] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a5ab8b96-48c9-43c5-8a9f-50df035cf119 tempest-ServerAddressesTestJSON-136838363 tempest-ServerAddressesTestJSON-136838363-project-member] Lock "e997db40-b3a6-4c06-8991-cdb96954c0ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.736s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.586636] env[61905]: DEBUG nova.network.neutron [-] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.720179] env[61905]: DEBUG nova.scheduler.client.report [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.853900] env[61905]: INFO nova.compute.manager [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] [instance: 426a5334-83fb-4c2a-85ac-42a8dddd775b] Took 1.04 seconds to deallocate network for instance. 
[ 654.952411] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527778df-8e71-c34e-fa75-38fa78702e79, 'name': SearchDatastore_Task, 'duration_secs': 0.009211} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.952614] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.952839] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.953089] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d824985-a579-4ce6-b0fa-33fd87089a4a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.959523] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 654.959523] env[61905]: value = "task-1362324" [ 654.959523] env[61905]: _type = "Task" [ 654.959523] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.967533] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.970012] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 655.089768] env[61905]: INFO nova.compute.manager [-] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Took 1.57 seconds to deallocate network for instance. 
[ 655.094780] env[61905]: DEBUG nova.compute.claims [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 655.094780] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.226029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.226029] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 655.227848] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.458s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.229767] env[61905]: INFO nova.compute.claims [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.470453] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362324, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.502434] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.734587] env[61905]: DEBUG nova.compute.utils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.738368] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 655.738552] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.825273] env[61905]: DEBUG nova.policy [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8111a336f4924746b24a74660668fb80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38f827b2bb2f4fb4a700ff24bd663b9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 655.895871] env[61905]: INFO nova.scheduler.client.report [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Deleted allocations for instance 426a5334-83fb-4c2a-85ac-42a8dddd775b [ 655.975596] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655388} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.975862] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.976671] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.976956] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63459703-ecaa-4096-8038-04463e97ddb1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.983532] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 655.983532] env[61905]: value = "task-1362325" [ 655.983532] env[61905]: _type = "Task" [ 655.983532] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.992547] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.242022] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 656.349653] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Successfully created port: 9c2a282d-ec0f-4dd6-8aac-693e4551b443 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.404762] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a1eb211-107b-4fd2-96da-1b417823eadf tempest-ServerGroupTestJSON-1397674639 tempest-ServerGroupTestJSON-1397674639-project-member] Lock "426a5334-83fb-4c2a-85ac-42a8dddd775b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.831s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.501138] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064649} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.504844] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.504844] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00755575-d7ce-4695-8ccc-9f8a5bb72912 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.528965] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.531880] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ecce80f-e1e4-4730-a31e-9e77d3286a76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.567630] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 656.567630] env[61905]: value = "task-1362326" [ 656.567630] env[61905]: _type = "Task" [ 656.567630] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.577327] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362326, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.734269] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d1fc27-247c-4b0e-acb6-c3e25b8a9f3c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.741762] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c380981a-7736-4398-82b7-df6a5dc108cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.746593] env[61905]: INFO nova.virt.block_device [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Booting with volume 05839fd6-ba5f-446e-990b-bbc778f52d56 at /dev/sda [ 656.791297] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21215edf-be3a-4374-a46e-41037aa6914b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.801052] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f92fbe-b0b8-4ad6-82d6-157c657dacf3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.815340] env[61905]: DEBUG nova.compute.provider_tree [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.837965] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6f014fe-eaea-42d7-a255-a7c4f22c8f94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.853129] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7d36d3-ed75-4d0d-ba52-88a65d4932a4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.875995] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7158976-8ff7-40c2-96f1-1595d7ec42d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.883926] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f80f9a-7f5e-40e6-af7e-d6028089a5d2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.907348] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bda1844-8f0f-4146-863b-33e635e6735c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.910473] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 656.916392] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0f77a0-ffed-4ee6-957e-d342126fa3cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.931164] env[61905]: DEBUG nova.virt.block_device [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating existing volume attachment record: 25eb31ed-ba94-4d6b-bf9c-cd006c912c9d {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 657.076337] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362326, 'name': ReconfigVM_Task, 'duration_secs': 0.314028} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.076727] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Reconfigured VM instance instance-0000001b to attach disk [datastore1] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.077550] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a281271a-0e72-4377-b39e-753910bb55bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.084431] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 657.084431] env[61905]: value = "task-1362327" [ 657.084431] env[61905]: _type = "Task" [ 657.084431] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.092808] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362327, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.317507] env[61905]: DEBUG nova.scheduler.client.report [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.446009] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.541141] env[61905]: DEBUG nova.compute.manager [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Received event network-changed-9c2a282d-ec0f-4dd6-8aac-693e4551b443 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 657.541334] env[61905]: DEBUG nova.compute.manager [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Refreshing instance network info cache due to event network-changed-9c2a282d-ec0f-4dd6-8aac-693e4551b443. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 657.541572] env[61905]: DEBUG oslo_concurrency.lockutils [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] Acquiring lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.542331] env[61905]: DEBUG oslo_concurrency.lockutils [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] Acquired lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.542331] env[61905]: DEBUG nova.network.neutron [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Refreshing network info cache for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.594725] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362327, 'name': Rename_Task, 'duration_secs': 0.141384} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.594998] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.595247] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-015a20af-ef46-43fb-84b5-2989cc000958 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.605388] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 657.605388] env[61905]: value = "task-1362328" [ 657.605388] env[61905]: _type = "Task" [ 657.605388] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.613499] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.709035] env[61905]: ERROR nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. 
[ 657.709035] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.709035] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.709035] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.709035] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.709035] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.709035] env[61905]: ERROR nova.compute.manager raise self.value [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.709035] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 657.709035] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.709035] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 657.709504] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.709504] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 657.709504] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. 
[ 657.709504] env[61905]: ERROR nova.compute.manager [ 657.709504] env[61905]: Traceback (most recent call last): [ 657.709504] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 657.709504] env[61905]: listener.cb(fileno) [ 657.709504] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.709504] env[61905]: result = function(*args, **kwargs) [ 657.709504] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 657.709504] env[61905]: return func(*args, **kwargs) [ 657.709504] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.709504] env[61905]: raise e [ 657.709504] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.709504] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 657.709504] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 657.709504] env[61905]: created_port_ids = self._update_ports_for_instance( [ 657.709504] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 657.709504] env[61905]: with excutils.save_and_reraise_exception(): [ 657.709504] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.709504] env[61905]: self.force_reraise() [ 657.709504] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.709504] env[61905]: raise self.value [ 657.709504] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 657.709504] env[61905]: updated_port = self._update_port( [ 657.709504] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.709504] env[61905]: _ensure_no_port_binding_failure(port) [ 657.709504] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.709504] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 657.710221] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. [ 657.710221] env[61905]: Removing descriptor: 18 [ 657.823359] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.823889] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 657.826695] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.828235] env[61905]: INFO nova.compute.claims [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.070864] env[61905]: DEBUG nova.network.neutron [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.115802] env[61905]: DEBUG oslo_vmware.api [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362328, 'name': PowerOnVM_Task, 'duration_secs': 0.459801} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.116148] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.116376] env[61905]: INFO nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Took 9.98 seconds to spawn the instance on the hypervisor. 
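(Note: the "Inventory has not changed" records above carry the provider's raw inventory. Placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio; the short Python sketch below simply recomputes that from the values logged for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, and is illustrative arithmetic rather than the actual Placement code.)

    # Recompute schedulable capacity from the inventory data logged above.
    # Illustrative only; the authoritative logic lives in the Placement service.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400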
[ 658.116563] env[61905]: DEBUG nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 658.117327] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bde8c75-c44e-41e8-8bf3-47240d8d866f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.204300] env[61905]: DEBUG nova.network.neutron [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.333236] env[61905]: DEBUG nova.compute.utils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.338149] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 658.338442] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.401098] env[61905]: DEBUG nova.policy [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '680a17726c6745de98ee2a3c8a9fd0c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd2f515523524779831f0bdd5b10e9ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 658.638438] env[61905]: INFO nova.compute.manager [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Took 49.52 seconds to build instance. 
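(Note: the PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure() in nova/network/neutron.py. The sketch below is a simplified, self-contained rendering of that check, assuming what the traceback shows plus one convention: Neutron marks a failed binding by setting the port's 'binding:vif_type' attribute to 'binding_failed', which Nova turns into the exception logged here.)

    # Simplified sketch of the check at the bottom of the tracebacks above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports on failure

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def _ensure_no_port_binding_failure(port):
        # Raise as soon as Neutron reports the binding could not be completed.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    failed_port = {'id': '9c2a282d-ec0f-4dd6-8aac-693e4551b443',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    # _ensure_no_port_binding_failure(failed_port)  # would raise, as in the log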
[ 658.707478] env[61905]: DEBUG oslo_concurrency.lockutils [req-39a82f00-db13-45eb-85ca-3cf257c8de4f req-82696d6e-5d8c-4a6f-b726-ec680fb0b4a7 service nova] Releasing lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.839700] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 658.923536] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Successfully created port: e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.060889] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 659.061433] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.061635] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.061778] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.061986] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.062094] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Image pref 0:0:0 {{(pid=61905) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.062234] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.062438] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.062583] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.062737] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.062888] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.063192] env[61905]: DEBUG nova.virt.hardware [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.064120] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3331be95-1684-4944-87e0-e7dfb7a76cc5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.077900] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a3330a-6f3d-4d0b-aa80-53779844ce77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.099421] env[61905]: ERROR nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. 
[ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Traceback (most recent call last): [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] yield resources [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.driver.spawn(context, instance, image_meta, [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] vm_ref = self.build_virtual_machine(instance, [ 659.099421] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] for vif in network_info: [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return self._sync_wrapper(fn, *args, **kwargs) [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.wait() [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self[:] = self._gt.wait() [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return self._exit_event.wait() [ 659.099863] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 659.099863] env[61905]: ERROR 
nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] current.throw(*self._exc) [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] result = function(*args, **kwargs) [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return func(*args, **kwargs) [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise e [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] nwinfo = self.network_api.allocate_for_instance( [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] created_port_ids = self._update_ports_for_instance( [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] with excutils.save_and_reraise_exception(): [ 659.100267] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.force_reraise() [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise self.value [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] updated_port = self._update_port( [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] _ensure_no_port_binding_failure(port) [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise exception.PortBindingFailed(port_id=port['id']) [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. [ 659.100768] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] [ 659.100768] env[61905]: INFO nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Terminating instance [ 659.101474] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquiring lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.101703] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquired lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.101928] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.138733] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c17db2d5-108f-4329-9ae0-3275e88b706d tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.738s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.373782] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87ee941-6ec9-46cf-9e13-547aada39dce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.381977] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0190317a-897d-4ce8-9d1e-b2d7d5b4985c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.425393] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a531fa-fb04-4085-baf3-2ba8c017f00f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.432948] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d8fde9-c7c8-465f-bf20-4abea6859d94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.446321] env[61905]: DEBUG nova.compute.provider_tree [None 
req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.450067] env[61905]: INFO nova.compute.manager [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Rebuilding instance [ 659.507872] env[61905]: DEBUG nova.compute.manager [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 659.508907] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b274fb0-059a-47a2-8758-b1f0ee894adf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.622518] env[61905]: DEBUG nova.compute.manager [req-71d9cfac-77ad-4834-b3e4-afcb76e1037d req-35128436-4b65-4352-8da4-29ca9a03ea57 service nova] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Received event network-vif-deleted-9c2a282d-ec0f-4dd6-8aac-693e4551b443 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 659.634662] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.641552] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 659.777068] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.855256] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 659.893881] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 659.893881] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 659.893881] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.894180] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 659.894180] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.894180] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 659.894293] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 659.895271] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 659.895271] 
env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 659.895271] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 659.895271] env[61905]: DEBUG nova.virt.hardware [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.895877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76987eec-874a-4cdd-90f9-0426926a0443 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.905610] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9bb783-e143-426f-bf47-30a2d8281940 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.955018] env[61905]: DEBUG nova.scheduler.client.report [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 660.021129] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 660.021474] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5279a048-a1ed-4d42-8549-f3fdad26c55e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.033563] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 660.033563] env[61905]: value = "task-1362329" [ 660.033563] env[61905]: _type = "Task" [ 660.033563] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.044674] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.167190] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.280630] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Releasing lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.281417] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 660.281857] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f4394fd-3fa6-4fc9-a085-e80542513df9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.294511] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c64939-4763-43c0-9af2-a03907d5ed60 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.317548] env[61905]: WARNING nova.virt.vmwareapi.driver [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 362c9148-9f78-4700-9c6f-7fd0eaef4bd7 could not be found. 
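(Note: the recurring "Waiting for the task: (returnval){ ... }", "progress is 0%", "completed successfully" triples come from oslo.vmware's task polling in api.py. Below is a rough, self-contained sketch of such a poll loop; get_task_info is a hypothetical stand-in for the PropertyCollector round-trip, and the real oslo_vmware implementation uses a looping call with fuller vCenter fault handling.)

    import time

    # Rough sketch of the poll loop behind the "Waiting for the task ...",
    # "progress is N%" and "completed successfully" lines above. Simplified:
    # get_task_info() stands in for one vCenter TaskInfo fetch.
    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()              # one poll of the TaskInfo
            if info['state'] == 'running':
                print(f"progress is {info.get('progress', 0)}%")
            elif info['state'] == 'success':
                print("completed successfully")
                return info.get('result')
            else:                               # error states, treated minimally
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(interval)

    # Tiny stand-in reproducing the logged sequence: 0% -> success.
    _states = iter([{'state': 'running', 'progress': 0},
                    {'state': 'success', 'result': 'ok'}])
    wait_for_task(lambda: next(_states), interval=0)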
[ 660.317792] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 660.318108] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c938c0bf-e790-4ea8-a92e-fe7a6537976b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.326516] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b0582e-5772-46ae-9bf6-007713ae544f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.351141] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 362c9148-9f78-4700-9c6f-7fd0eaef4bd7 could not be found. [ 660.351381] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.351547] env[61905]: INFO nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Took 0.07 seconds to destroy the instance on the hypervisor. [ 660.351790] env[61905]: DEBUG oslo.service.loopingcall [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.352024] env[61905]: DEBUG nova.compute.manager [-] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 660.352123] env[61905]: DEBUG nova.network.neutron [-] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.412316] env[61905]: DEBUG nova.network.neutron [-] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.459580] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.460102] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 660.467156] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.849s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.546318] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362329, 'name': PowerOffVM_Task, 'duration_secs': 0.204174} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.546607] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 660.546820] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 660.547641] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9eadc3-c7be-48c5-8223-ff9183a18ecb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.556063] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 660.556227] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa9c1390-05f7-4ea0-9004-55938a593b73 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.581085] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] 
Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 660.581370] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 660.581510] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleting the datastore file [datastore1] e6a063b4-d4f8-46ae-89ae-2d66637896ae {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.581781] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-358ad53e-fa30-4e0d-8df2-e9b518027211 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.593458] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 660.593458] env[61905]: value = "task-1362331" [ 660.593458] env[61905]: _type = "Task" [ 660.593458] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.602035] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.727997] env[61905]: ERROR nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. 
[ 660.727997] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.727997] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.727997] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.727997] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.727997] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.727997] env[61905]: ERROR nova.compute.manager raise self.value [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.727997] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 660.727997] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.727997] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 660.728557] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.728557] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 660.728557] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. 
[ 660.728557] env[61905]: ERROR nova.compute.manager [ 660.728557] env[61905]: Traceback (most recent call last): [ 660.728557] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 660.728557] env[61905]: listener.cb(fileno) [ 660.728557] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.728557] env[61905]: result = function(*args, **kwargs) [ 660.728557] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 660.728557] env[61905]: return func(*args, **kwargs) [ 660.728557] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 660.728557] env[61905]: raise e [ 660.728557] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 660.728557] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 660.728557] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.728557] env[61905]: created_port_ids = self._update_ports_for_instance( [ 660.728557] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.728557] env[61905]: with excutils.save_and_reraise_exception(): [ 660.728557] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.728557] env[61905]: self.force_reraise() [ 660.728557] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.728557] env[61905]: raise self.value [ 660.728557] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.728557] env[61905]: updated_port = self._update_port( [ 660.728557] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.728557] env[61905]: _ensure_no_port_binding_failure(port) [ 660.728557] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.728557] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 660.729376] env[61905]: nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. [ 660.729376] env[61905]: Removing descriptor: 18 [ 660.729376] env[61905]: ERROR nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. 
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Traceback (most recent call last):
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] yield resources
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.driver.spawn(context, instance, image_meta,
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 660.729376] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] vm_ref = self.build_virtual_machine(instance,
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] vif_infos = vmwarevif.get_vif_info(self._session,
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] for vif in network_info:
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self._sync_wrapper(fn, *args, **kwargs)
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.wait()
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self[:] = self._gt.wait()
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self._exit_event.wait()
[ 660.729727] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] result = hub.switch()
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self.greenlet.switch()
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] result = function(*args, **kwargs)
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return func(*args, **kwargs)
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise e
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] nwinfo = self.network_api.allocate_for_instance(
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 660.730109] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] created_port_ids = self._update_ports_for_instance(
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] with excutils.save_and_reraise_exception():
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.force_reraise()
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise self.value
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] updated_port = self._update_port(
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] _ensure_no_port_binding_failure(port)
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 660.730491] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise exception.PortBindingFailed(port_id=port['id'])
[ 660.730833] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information.
[ 660.730833] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc]
[ 660.730833] env[61905]: INFO nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Terminating instance
[ 660.732107] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquiring lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 660.732270] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquired lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 660.732576] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 660.920456] env[61905]: DEBUG nova.network.neutron [-] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 660.968227] env[61905]: DEBUG nova.compute.utils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 660.973019] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 660.973019] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 661.066980] env[61905]: DEBUG nova.policy [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c96c7fd58c04379a6196d428ffcdffd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e36aced58abc41f0b5eba97157ffee2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 661.102183] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112754} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 661.105220] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 661.105533] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 661.105797] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 661.255815] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 661.411700] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 661.423749] env[61905]: INFO nova.compute.manager [-] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Took 1.07 seconds to deallocate network for instance.
[ 661.424618] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dea1ebc-e774-44fd-9f2e-9734957471dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.438142] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2069b1-6a69-498d-9dd4-f20c7ca5d6b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.477634] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 661.481393] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad3c17c-cced-4024-ac85-8807232f5029 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.489357] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff69c96-ce0a-466e-8ab1-33fe8b13bbcf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.496139] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Successfully created port: 809c13b0-2e13-4056-ab7e-e2319e3d2000 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 661.505972] env[61905]: DEBUG nova.compute.provider_tree [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 661.724652] env[61905]: DEBUG nova.compute.manager [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Received event network-changed-e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 661.724983] env[61905]: DEBUG nova.compute.manager [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Refreshing instance network info cache due to event network-changed-e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 661.724983] env[61905]: DEBUG oslo_concurrency.lockutils [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] Acquiring lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 661.915972] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Releasing lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 661.916743] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 661.918874] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 661.920560] env[61905]: DEBUG oslo_concurrency.lockutils [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] Acquired lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 661.921137] env[61905]: DEBUG nova.network.neutron [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Refreshing network info cache for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 661.922528] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75459a29-7d1e-4f46-80cc-87af2629b072 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.932893] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea581711-1610-4ffb-8f87-a6a5e12bf5ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.960894] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 105aed8e-4268-4553-9564-1540cb5176dc could not be found.
[ 661.961159] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 661.961317] env[61905]: INFO nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 661.961590] env[61905]: DEBUG oslo.service.loopingcall [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 661.961771] env[61905]: DEBUG nova.compute.manager [-] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 661.961869] env[61905]: DEBUG nova.network.neutron [-] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 661.988750] env[61905]: DEBUG nova.network.neutron [-] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 662.013676] env[61905]: DEBUG nova.scheduler.client.report [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 662.018040] env[61905]: INFO nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Took 0.59 seconds to detach 1 volumes for instance.
[ 662.021460] env[61905]: DEBUG nova.compute.claims [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 662.021460] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 662.146615] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 662.147178] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 662.147573] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 662.151016] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 662.151016] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 662.151016] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 662.151016] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 662.151016] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 662.151217] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 662.151217] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 662.151217] env[61905]: DEBUG nova.virt.hardware [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 662.151217] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8bafc5-e866-457a-9cce-7877bc45dbf1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.160353] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b817c90-faaf-49f1-9e8e-5fec7217011b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.180859] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 662.186709] env[61905]: DEBUG oslo.service.loopingcall [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 662.187108] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 662.187434] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57cf6854-face-403c-8dc0-b2058d354cd1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.208016] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 662.208016] env[61905]: value = "task-1362332"
[ 662.208016] env[61905]: _type = "Task"
[ 662.208016] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 662.216196] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362332, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 662.451532] env[61905]: DEBUG nova.network.neutron [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 662.491686] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 662.494059] env[61905]: DEBUG nova.network.neutron [-] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 662.517632] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.051s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 662.518258] env[61905]: ERROR nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information.
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Traceback (most recent call last):
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.driver.spawn(context, instance, image_meta,
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] vm_ref = self.build_virtual_machine(instance,
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] vif_infos = vmwarevif.get_vif_info(self._session,
[ 662.518258] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] for vif in network_info:
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self._sync_wrapper(fn, *args, **kwargs)
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.wait()
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self[:] = self._gt.wait()
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self._exit_event.wait()
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] result = hub.switch()
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 662.518598] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return self.greenlet.switch()
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] result = function(*args, **kwargs)
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] return func(*args, **kwargs)
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise e
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] nwinfo = self.network_api.allocate_for_instance(
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] created_port_ids = self._update_ports_for_instance(
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] with excutils.save_and_reraise_exception():
[ 662.518930] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] self.force_reraise()
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise self.value
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] updated_port = self._update_port(
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] _ensure_no_port_binding_failure(port)
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] raise exception.PortBindingFailed(port_id=port['id'])
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] nova.exception.PortBindingFailed: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information.
[ 662.519251] env[61905]: ERROR nova.compute.manager [instance: eb372895-68b6-41cb-8ae5-dbfd57387505]
[ 662.519511] env[61905]: DEBUG nova.compute.utils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 662.522019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.433s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 662.524673] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Build of instance eb372895-68b6-41cb-8ae5-dbfd57387505 was re-scheduled: Binding failed for port 32febe8b-1fa3-485d-a18b-2ea1db9bd4ef, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 662.525259] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 662.525420] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 662.525568] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 662.525721] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 662.529035] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 662.529292] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 662.529623] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 662.529716] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 662.529854] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 662.530038] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 662.530248] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 662.530435] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 662.530664] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 662.530862] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 662.531074] env[61905]: DEBUG nova.virt.hardware [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 662.532486] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a1c61-f568-4c7b-8dca-62f698532fc7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.544853] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2260c56e-b1ad-430b-9b5f-69e2358b9d7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.552283] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 662.642640] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "79537eaa-5abf-477b-bce6-c079c9beb964" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 662.642875] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "79537eaa-5abf-477b-bce6-c079c9beb964" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 662.664103] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 662.685444] env[61905]: DEBUG nova.network.neutron [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 662.717707] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362332, 'name': CreateVM_Task, 'duration_secs': 0.277893} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 662.717879] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 662.718304] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 662.718463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 662.718785] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 662.719047] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58ce1ddd-1b8a-47cc-ae39-fcc3e196b73e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 662.723689] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 662.723689] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52542465-7d5d-b2db-6c55-8d1c000b4da7"
[ 662.723689] env[61905]: _type = "Task"
[ 662.723689] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 662.731727] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52542465-7d5d-b2db-6c55-8d1c000b4da7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 662.920043] env[61905]: ERROR nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 662.920043] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 662.920043] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 662.920043] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 662.920043] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 662.920043] env[61905]: ERROR nova.compute.manager self.force_reraise()
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 662.920043] env[61905]: ERROR nova.compute.manager raise self.value
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 662.920043] env[61905]: ERROR nova.compute.manager updated_port = self._update_port(
[ 662.920043] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 662.920043] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 662.920507] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 662.920507] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 662.920507] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 662.920507] env[61905]: ERROR nova.compute.manager
[ 662.920507] env[61905]: Traceback (most recent call last):
[ 662.920507] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 662.920507] env[61905]: listener.cb(fileno)
[ 662.920507] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 662.920507] env[61905]: result = function(*args, **kwargs)
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 662.920507] env[61905]: return func(*args, **kwargs)
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 662.920507] env[61905]: raise e
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 662.920507] env[61905]: nwinfo = self.network_api.allocate_for_instance(
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 662.920507] env[61905]: created_port_ids = self._update_ports_for_instance(
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 662.920507] env[61905]: with excutils.save_and_reraise_exception():
[ 662.920507] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 662.920507] env[61905]: self.force_reraise()
[ 662.920507] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 662.920507] env[61905]: raise self.value
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 662.920507] env[61905]: updated_port = self._update_port(
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 662.920507] env[61905]: _ensure_no_port_binding_failure(port)
[ 662.920507] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 662.920507] env[61905]: raise exception.PortBindingFailed(port_id=port['id'])
[ 662.921378] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 662.921378] env[61905]: Removing descriptor: 18
[ 662.921378] env[61905]: ERROR nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Traceback (most recent call last):
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     yield resources
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     self.driver.spawn(context, instance, image_meta,
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 662.921378] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     vm_ref = self.build_virtual_machine(instance,
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     for vif in network_info:
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     return self._sync_wrapper(fn, *args, **kwargs)
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     self.wait()
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     self[:] = self._gt.wait()
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     return self._exit_event.wait()
[ 662.921724] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     result = hub.switch()
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     return self.greenlet.switch()
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     result = function(*args, **kwargs)
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     return func(*args, **kwargs)
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     raise e
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     nwinfo = self.network_api.allocate_for_instance(
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 662.922103] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     created_port_ids = self._update_ports_for_instance(
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     with excutils.save_and_reraise_exception():
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     self.force_reraise()
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     raise self.value
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     updated_port = self._update_port(
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     _ensure_no_port_binding_failure(port)
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 662.922474] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]     raise exception.PortBindingFailed(port_id=port['id'])
[ 662.922813] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 662.922813] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b]
[ 662.922813] env[61905]: INFO nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Terminating instance
[ 662.924041] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 662.924185] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 662.924420] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 662.999694] env[61905]: INFO nova.compute.manager [-] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Took 1.04 seconds to deallocate network for instance.
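
The Acquiring/Acquired/Releasing lock records threaded through this log come from oslo.concurrency's lockutils module, which Nova uses to serialize work on a named resource such as "refresh_cache-<instance uuid>" or "compute_resources". A minimal sketch of that pattern, assuming only the oslo.concurrency package is installed; the function name and body are placeholders, not Nova's code:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid):
        # lockutils.lock() returns a context manager keyed by name;
        # entering and leaving it emits Acquiring/Acquired/Releasing
        # DEBUG records of the kind interleaved through this log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance_info_cache under the lock

    refresh_network_cache('60f2eb1d-de4c-4318-98c3-eb2d411c120b')

By default the lock is an in-process semaphore, which is all that is needed here since every record carries the same pid (61905).
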
[ 663.002874] env[61905]: DEBUG nova.compute.claims [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 663.003082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.169019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-eb372895-68b6-41cb-8ae5-dbfd57387505" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.169278] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 663.169501] env[61905]: DEBUG nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 663.169688] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.192887] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.194542] env[61905]: DEBUG oslo_concurrency.lockutils [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] Releasing lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.194621] env[61905]: DEBUG nova.compute.manager [req-d2910cc6-bb29-489a-97ce-fcceeb7b702b req-351690fb-7f8e-4aaf-a012-315f18f135b5 service nova] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Received event network-vif-deleted-e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 663.234617] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52542465-7d5d-b2db-6c55-8d1c000b4da7, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.234917] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.235159] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.235386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.235531] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.237569] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.237569] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76066c5a-5a20-470c-9a48-5615385c1eb9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.245268] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.245455] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.246182] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d46ee257-19a5-475f-a5b0-29e5a36d7077 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.251450] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 663.251450] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5292b8b0-aef9-3df3-c261-00f4b502f64e" [ 663.251450] env[61905]: _type = "Task" [ 663.251450] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.262790] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5292b8b0-aef9-3df3-c261-00f4b502f64e, 'name': SearchDatastore_Task, 'duration_secs': 0.007771} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.263572] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2ac4085-3bad-42d3-b03a-9671168f3188 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.271792] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 663.271792] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520eb4a7-b08f-c3c6-cf77-614f9f03f73b" [ 663.271792] env[61905]: _type = "Task" [ 663.271792] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.280728] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520eb4a7-b08f-c3c6-cf77-614f9f03f73b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.442357] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c586032-9032-400c-a66f-2c8765d7a442 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.449892] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b008bb38-bc5f-4c8a-ab05-9839a11698dd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.453645] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.487099] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfb7950-30a7-44e7-b6d8-be94073a6711 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.494698] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43578993-6a7e-4e84-b075-e44a652690ad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.507804] env[61905]: DEBUG nova.compute.provider_tree [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.600629] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.695660] env[61905]: DEBUG nova.network.neutron [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.771863] env[61905]: DEBUG nova.compute.manager [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Received event network-changed-809c13b0-2e13-4056-ab7e-e2319e3d2000 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 663.771863] env[61905]: DEBUG nova.compute.manager [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Refreshing instance network info cache due to event network-changed-809c13b0-2e13-4056-ab7e-e2319e3d2000. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 663.771863] env[61905]: DEBUG oslo_concurrency.lockutils [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] Acquiring lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.785685] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520eb4a7-b08f-c3c6-cf77-614f9f03f73b, 'name': SearchDatastore_Task, 'duration_secs': 0.007804} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.785956] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.786224] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.786509] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd838be3-4732-4b0d-8817-1af98f31f20e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.798502] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 663.798502] env[61905]: value = "task-1362333" [ 663.798502] env[61905]: _type = "Task" [ 663.798502] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.808060] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362333, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.012661] env[61905]: DEBUG nova.scheduler.client.report [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 664.104088] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Releasing lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.104526] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 664.104764] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 664.105077] env[61905]: DEBUG oslo_concurrency.lockutils [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] Acquired lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.105223] env[61905]: DEBUG nova.network.neutron [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Refreshing network info cache for port 809c13b0-2e13-4056-ab7e-e2319e3d2000 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.106381] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9e29692-5cf5-463c-b889-b271a69f6784 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.117008] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1a22d5-c23e-4935-b67c-b94ed4bb28b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.143564] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
60f2eb1d-de4c-4318-98c3-eb2d411c120b could not be found. [ 664.143683] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 664.143837] env[61905]: INFO nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 664.144102] env[61905]: DEBUG oslo.service.loopingcall [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.144385] env[61905]: DEBUG nova.compute.manager [-] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 664.144472] env[61905]: DEBUG nova.network.neutron [-] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 664.202563] env[61905]: INFO nova.compute.manager [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: eb372895-68b6-41cb-8ae5-dbfd57387505] Took 1.03 seconds to deallocate network for instance. [ 664.308666] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474151} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.308913] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.309121] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.309369] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f330b9d-d3f1-4199-bbe3-e530716a1403 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.315921] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 664.315921] env[61905]: value = "task-1362334" [ 664.315921] env[61905]: _type = "Task" [ 664.315921] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.323302] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362334, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.329734] env[61905]: DEBUG nova.network.neutron [-] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.519678] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.998s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.520351] env[61905]: ERROR nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information. 
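
Both PortBindingFailed tracebacks in this section pass through excutils.save_and_reraise_exception(), the oslo.utils helper whose force_reraise()/raise self.value frames appear above and in the traceback that follows: it lets cleanup run after a failure and then re-raises the original exception. A minimal sketch of its typical use, assuming only the oslo.utils package; the ValueError and print stand in for the Neutron call and rollback:

    from oslo_utils import excutils

    def update_ports():
        try:
            raise ValueError('port update failed')  # stand-in for the Neutron call
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here while the original exception is saved;
                # on exit the context manager re-raises it unchanged.
                print('rolling back partially created ports')

Binding "as ctxt" and setting ctxt.reraise = False inside the block suppresses the re-raise when cleanup decides the error is fully handled.
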
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Traceback (most recent call last):
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     self.driver.spawn(context, instance, image_meta,
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     vm_ref = self.build_virtual_machine(instance,
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 664.520351] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     for vif in network_info:
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     return self._sync_wrapper(fn, *args, **kwargs)
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     self.wait()
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     self[:] = self._gt.wait()
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     return self._exit_event.wait()
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     result = hub.switch()
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 664.520660] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     return self.greenlet.switch()
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     result = function(*args, **kwargs)
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     return func(*args, **kwargs)
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     raise e
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     nwinfo = self.network_api.allocate_for_instance(
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     created_port_ids = self._update_ports_for_instance(
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     with excutils.save_and_reraise_exception():
[ 664.520993] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     self.force_reraise()
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     raise self.value
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     updated_port = self._update_port(
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     _ensure_no_port_binding_failure(port)
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]     raise exception.PortBindingFailed(port_id=port['id'])
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] nova.exception.PortBindingFailed: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
[ 664.521314] env[61905]: ERROR nova.compute.manager [instance: 949307dd-f8c4-4a79-ad82-99d416d06332]
[ 664.521932] env[61905]: DEBUG nova.compute.utils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 664.522366] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.442s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 664.524248] env[61905]: INFO nova.compute.claims [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 664.527882] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Build of instance 949307dd-f8c4-4a79-ad82-99d416d06332 was re-scheduled: Binding failed for port 61353403-af3f-4851-9acf-de7a0ca84efa, please check neutron logs for more information.
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 664.529182] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 664.529182] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquiring lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.529182] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Acquired lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.529182] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.627471] env[61905]: DEBUG nova.network.neutron [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.732484] env[61905]: DEBUG nova.network.neutron [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.825148] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362334, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064063} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.825500] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.826986] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1320aa1-e101-4a8f-bd31-e9ad8a1f00cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.841711] env[61905]: DEBUG nova.network.neutron [-] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.855072] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.857396] env[61905]: INFO nova.compute.manager [-] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Took 0.71 seconds to deallocate network for instance. [ 664.857833] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6992b1fd-f085-4304-abe5-2cb97b538798 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.881901] env[61905]: DEBUG nova.compute.claims [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 664.883299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.886125] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 664.886125] env[61905]: value = "task-1362335" [ 664.886125] env[61905]: _type = "Task" [ 664.886125] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.894348] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362335, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.046406] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.054610] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquiring lock "3ad9206a-5562-43a6-87a4-869f93b10933" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.054827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "3ad9206a-5562-43a6-87a4-869f93b10933" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.128736] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.233429] env[61905]: INFO nova.scheduler.client.report [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted allocations for instance eb372895-68b6-41cb-8ae5-dbfd57387505 [ 665.239929] env[61905]: DEBUG oslo_concurrency.lockutils [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] Releasing lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.240326] env[61905]: DEBUG nova.compute.manager [req-72eca587-b8f0-41da-9f0c-14de9550e97d req-fbb63083-7a67-4900-a824-7db489d48b27 service nova] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Received event network-vif-deleted-809c13b0-2e13-4056-ab7e-e2319e3d2000 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 665.399404] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362335, 'name': ReconfigVM_Task, 'duration_secs': 0.291634} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.399404] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Reconfigured VM instance instance-0000001b to attach disk [datastore2] e6a063b4-d4f8-46ae-89ae-2d66637896ae/e6a063b4-d4f8-46ae-89ae-2d66637896ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.399404] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ab82a48-0cca-444e-8d4a-6646e8fcff47 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.405025] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 665.405025] env[61905]: value = "task-1362336" [ 665.405025] env[61905]: _type = "Task" [ 665.405025] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.413811] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362336, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.632103] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Releasing lock "refresh_cache-949307dd-f8c4-4a79-ad82-99d416d06332" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.632388] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 665.632644] env[61905]: DEBUG nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 665.632826] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 665.656992] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 665.742796] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9f74fd32-cbb2-4e2d-8a50-d1a4721e4bb6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "eb372895-68b6-41cb-8ae5-dbfd57387505" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 120.561s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 665.914534] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362336, 'name': Rename_Task, 'duration_secs': 0.151616} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 665.914534] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 665.915533] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1abadec0-d38d-46d0-8815-be39333fd390 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 665.921246] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 665.921246] env[61905]: value = "task-1362337"
[ 665.921246] env[61905]: _type = "Task"
[ 665.921246] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 665.922838] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5936ac4a-3f51-45b0-a478-c80e7f861d8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 665.932996] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 665.935716] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c649c8d4-2616-4997-9aa7-134523bb7861 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 665.965673] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee9ad16-8134-4eb2-8358-71611db9875a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 665.973207] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815e0eae-798d-4667-85cf-6ec2f8d1d37f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 665.987939] env[61905]: DEBUG nova.compute.provider_tree [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 666.159430] env[61905]: DEBUG nova.network.neutron [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 666.247051] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 666.431442] env[61905]: DEBUG oslo_vmware.api [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362337, 'name': PowerOnVM_Task, 'duration_secs': 0.420941} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 666.431739] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 666.433028] env[61905]: DEBUG nova.compute.manager [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 666.433028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3c0fd8-90eb-493b-80eb-7f8a8c3cb9cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 666.490844] env[61905]: DEBUG nova.scheduler.client.report [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 666.662509] env[61905]: INFO nova.compute.manager [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] [instance: 949307dd-f8c4-4a79-ad82-99d416d06332] Took 1.03 seconds to deallocate network for instance.
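The "Inventory has not changed" records above carry the full inventory the resource tracker reports to placement for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7. The capacity placement treats as schedulable for each resource class follows from the logged fields as (total - reserved) * allocation_ratio; a minimal sketch of that arithmetic, not part of the log (the helper name is illustrative):

    # Inventory dict copied from the report.py:954 record above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        # Placement considers (total - reserved) * allocation_ratio
        # per resource class when filtering allocation candidates.
        return {rc: (f['total'] - f['reserved']) * f['allocation_ratio']
                for rc, f in inv.items()}

    print(schedulable_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So the 48 host cores behave as 192 schedulable VCPUs under the 4.0 allocation ratio, while memory and disk are not overcommitted.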
[ 666.778023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 666.953100] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 666.995880] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 666.996531] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 667.000227] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.906s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 667.511039] env[61905]: DEBUG nova.compute.utils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 667.511775] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 667.511955] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 667.594207] env[61905]: DEBUG nova.policy [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5944fa6762254381aae7cae18d0d075e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c4147df3c2e946ed9f1908695968b809', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 667.678325] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 667.678325] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 667.678637] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 667.678759] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 667.678952] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 667.683279] env[61905]: INFO nova.compute.manager [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Terminating instance
[ 667.685426] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "refresh_cache-e6a063b4-d4f8-46ae-89ae-2d66637896ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 667.685426] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "refresh_cache-e6a063b4-d4f8-46ae-89ae-2d66637896ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 667.685591] env[61905]: DEBUG nova.network.neutron [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 667.712185] env[61905]: INFO nova.scheduler.client.report [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Deleted allocations for instance 949307dd-f8c4-4a79-ad82-99d416d06332
[ 667.956883] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfdca14-18f0-4497-b7d2-31b184d070cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.965387] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3e3087-e3a4-4297-a790-c8b814bf599b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 667.994328] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d8e3e7-1753-4d32-b197-b89b4b2f46ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.002014] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba7af48-63b9-491f-8333-2a651811e6c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.014995] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 668.017774] env[61905]: DEBUG nova.compute.provider_tree [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 668.046602] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Successfully created port: 96ccd575-825e-42dc-8ec0-df5e3468eaca {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 668.221234] env[61905]: DEBUG nova.network.neutron [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 668.223581] env[61905]: DEBUG oslo_concurrency.lockutils [None req-feeaa70b-4705-475d-bef0-de716a520c4c tempest-ServersV294TestFqdnHostnames-1166378715 tempest-ServersV294TestFqdnHostnames-1166378715-project-member] Lock "949307dd-f8c4-4a79-ad82-99d416d06332" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 120.026s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 668.331345] env[61905]: DEBUG nova.network.neutron [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 668.525524] env[61905]: DEBUG nova.scheduler.client.report [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 668.727228] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 668.835023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "refresh_cache-e6a063b4-d4f8-46ae-89ae-2d66637896ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 668.835023] env[61905]: DEBUG nova.compute.manager [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 668.835023] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 668.835023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e93aeb6-7718-489e-ad11-c411ff2bef23 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.844624] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 668.845090] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc1eaab6-b87e-4c95-b6ed-64de9a76b75c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 668.852247] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 668.852247] env[61905]: value = "task-1362338"
[ 668.852247] env[61905]: _type = "Task"
[ 668.852247] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 668.861868] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 668.910585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0bed6b1c-237b-469d-9f9b-0c4c84550ffb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 668.910830] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0bed6b1c-237b-469d-9f9b-0c4c84550ffb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 669.031976] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 669.034184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.034s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 669.034975] env[61905]: ERROR nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information.
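The Rename_Task, PowerOnVM_Task and PowerOffVM_Task records above all follow one shape: oslo.vmware submits a vCenter task, logs "Waiting for the task", and polls it ("progress is 0%." ... "completed successfully") until it reaches a terminal state; the ERROR record immediately above is expanded in the traceback that follows. A rough sketch of that polling loop, illustrative only (get_task_info stands in for the PropertyCollector reads that the real wait_for_task performs):

    import time

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vSphere task until it succeeds or errors."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # Matches the periodic "progress is N%" records in the log.
            time.sleep(interval)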
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Traceback (most recent call last):
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     self.driver.spawn(context, instance, image_meta,
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     vm_ref = self.build_virtual_machine(instance,
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 669.034975] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     for vif in network_info:
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     return self._sync_wrapper(fn, *args, **kwargs)
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     self.wait()
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     self[:] = self._gt.wait()
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     return self._exit_event.wait()
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     result = hub.switch()
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 669.035334] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     return self.greenlet.switch()
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     result = function(*args, **kwargs)
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     return func(*args, **kwargs)
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     raise e
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     nwinfo = self.network_api.allocate_for_instance(
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     created_port_ids = self._update_ports_for_instance(
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     with excutils.save_and_reraise_exception():
[ 669.035632] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     self.force_reraise()
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     raise self.value
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     updated_port = self._update_port(
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     _ensure_no_port_binding_failure(port)
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]     raise exception.PortBindingFailed(port_id=port['id'])
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] nova.exception.PortBindingFailed: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information.
[ 669.035943] env[61905]: ERROR nova.compute.manager [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c]
[ 669.038765] env[61905]: DEBUG nova.compute.utils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 669.040074] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.538s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 669.046456] env[61905]: INFO nova.compute.claims [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 669.056794] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Build of instance b92a6db6-c51a-45c8-9792-d394027bcb7c was re-scheduled: Binding failed for port 7b181c94-3a30-420f-bbbd-8cbb5655b059, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 669.056794] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 669.056794] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.056794] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 669.057038] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 669.069053] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 669.069470] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 669.069694] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 669.069914] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 669.070164] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 669.070364] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 669.070566] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 669.070775] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 669.071013] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 669.071216] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 669.071421] env[61905]: DEBUG nova.virt.hardware [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 669.072608] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac80158-7fcb-4ff9-8c2a-bfac5bdbddb0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.081443] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8860fdfd-fbc7-4f05-9909-61c63aac3ce7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.185898] env[61905]: DEBUG nova.compute.manager [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Received event network-changed-96ccd575-825e-42dc-8ec0-df5e3468eaca {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 669.186245] env[61905]: DEBUG nova.compute.manager [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Refreshing instance network info cache due to event network-changed-96ccd575-825e-42dc-8ec0-df5e3468eaca. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 669.186581] env[61905]: DEBUG oslo_concurrency.lockutils [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] Acquiring lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.186927] env[61905]: DEBUG oslo_concurrency.lockutils [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] Acquired lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 669.186927] env[61905]: DEBUG nova.network.neutron [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Refreshing network info cache for port 96ccd575-825e-42dc-8ec0-df5e3468eaca {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 669.250399] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 669.308178] env[61905]: ERROR nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information.
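The PortBindingFailed that ends this span is raised at nova/network/neutron.py:294 in _ensure_no_port_binding_failure, visible in the tracebacks before and after it. Judging from those frames, the check itself is small: after _update_port, Nova inspects the binding:vif_type Neutron returned and aborts if the binding failed. A condensed sketch of that logic, reconstructed from the traceback rather than copied from Nova:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f'Binding failed for port {port_id}, '
                             'please check neutron logs for more information.')

    def _ensure_no_port_binding_failure(port):
        # Neutron reports the binding outcome in binding:vif_type;
        # 'binding_failed' means no mechanism driver could bind the port.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])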
[ 669.308178] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 669.308178] env[61905]: ERROR nova.compute.manager     nwinfo = self.network_api.allocate_for_instance(
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 669.308178] env[61905]: ERROR nova.compute.manager     created_port_ids = self._update_ports_for_instance(
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 669.308178] env[61905]: ERROR nova.compute.manager     with excutils.save_and_reraise_exception():
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 669.308178] env[61905]: ERROR nova.compute.manager     self.force_reraise()
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 669.308178] env[61905]: ERROR nova.compute.manager     raise self.value
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 669.308178] env[61905]: ERROR nova.compute.manager     updated_port = self._update_port(
[ 669.308178] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 669.308178] env[61905]: ERROR nova.compute.manager     _ensure_no_port_binding_failure(port)
[ 669.308770] env[61905]: ERROR nova.compute.manager   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 669.308770] env[61905]: ERROR nova.compute.manager     raise exception.PortBindingFailed(port_id=port['id'])
[ 669.308770] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information.
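The excutils frames in the traceback above (__exit__ at excutils.py:227, force_reraise at excutils.py:200) come from oslo.utils' save_and_reraise_exception context manager, which _update_ports_for_instance uses to run cleanup while the PortBindingFailed is in flight and then re-raise it. The usage pattern looks roughly like this (a sketch; do_update and cleanup_ports are placeholders, not Nova's names):

    from oslo_utils import excutils

    def update_ports_for_instance(ports):
        try:
            do_update(ports)  # placeholder for the real port updates
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; on leaving the block the saved
                # exception is re-raised ("raise self.value" in the log)
                # unless the block sets ctxt.reraise = False.
                cleanup_ports(ports)  # placeholder cleanup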
[ 669.308770] env[61905]: ERROR nova.compute.manager
[ 669.308770] env[61905]: Traceback (most recent call last):
[ 669.308770] env[61905]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 669.308770] env[61905]:     listener.cb(fileno)
[ 669.308770] env[61905]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 669.308770] env[61905]:     result = function(*args, **kwargs)
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 669.308770] env[61905]:     return func(*args, **kwargs)
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 669.308770] env[61905]:     raise e
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 669.308770] env[61905]:     nwinfo = self.network_api.allocate_for_instance(
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 669.308770] env[61905]:     created_port_ids = self._update_ports_for_instance(
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 669.308770] env[61905]:     with excutils.save_and_reraise_exception():
[ 669.308770] env[61905]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 669.308770] env[61905]:     self.force_reraise()
[ 669.308770] env[61905]:   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 669.308770] env[61905]:     raise self.value
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 669.308770] env[61905]:     updated_port = self._update_port(
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 669.308770] env[61905]:     _ensure_no_port_binding_failure(port)
[ 669.308770] env[61905]:   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 669.308770] env[61905]:     raise exception.PortBindingFailed(port_id=port['id'])
[ 669.309707] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information.
[ 669.309707] env[61905]: Removing descriptor: 18
[ 669.309707] env[61905]: ERROR nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information.
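This second copy of the traceback is printed by eventlet itself (the hubs/poll.py frames): _allocate_network_async runs in its own greenthread, per the "Allocating IP information in the background." record earlier, and the exception both kills that greenthread and is re-raised to the build when it waits on the result (the _sync_wrapper/self._gt.wait() frames in the first traceback). A minimal self-contained sketch of that spawn-then-wait shape, with illustrative names:

    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async():
        # Stand-in for ComputeManager._allocate_network_async.
        raise PortBindingFailed('Binding failed for port ...')

    gt = eventlet.spawn(allocate_network_async)
    try:
        gt.wait()  # the stored exception re-raises in the waiter
    except PortBindingFailed as exc:
        print('surfaces in the build thread:', exc)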
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] Traceback (most recent call last):
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     yield resources
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     self.driver.spawn(context, instance, image_meta,
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 669.309707] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     vm_ref = self.build_virtual_machine(instance,
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     for vif in network_info:
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     return self._sync_wrapper(fn, *args, **kwargs)
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     self.wait()
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     self[:] = self._gt.wait()
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     return self._exit_event.wait()
[ 669.310079] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     result = hub.switch()
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     return self.greenlet.switch()
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     result = function(*args, **kwargs)
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     return func(*args, **kwargs)
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     raise e
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     nwinfo = self.network_api.allocate_for_instance(
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 669.310458] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     created_port_ids = self._update_ports_for_instance(
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     with excutils.save_and_reraise_exception():
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     self.force_reraise()
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     raise self.value
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     updated_port = self._update_port(
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     _ensure_no_port_binding_failure(port)
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 669.310836] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]     raise exception.PortBindingFailed(port_id=port['id'])
[ 669.311142] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information.
[ 669.311142] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe]
[ 669.311142] env[61905]: INFO nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Terminating instance
[ 669.311677] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquiring lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 669.363587] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362338, 'name': PowerOffVM_Task, 'duration_secs': 0.201631} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 669.363846] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 669.364405] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 669.364706] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-036ca969-e30c-45e9-b8e2-a3104213def0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.388844] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 669.388844] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 669.388844] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleting the datastore file [datastore2] e6a063b4-d4f8-46ae-89ae-2d66637896ae {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 669.389071] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e56b82f-dc40-4f6e-a2ae-f60cff403a11 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 669.395186] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){
[ 669.395186] env[61905]: value = "task-1362340"
[ 669.395186] env[61905]: _type = "Task"
[ 669.395186] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 669.403065] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 669.587680] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 669.714168] env[61905]: DEBUG nova.network.neutron [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 669.752010] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 669.837296] env[61905]: DEBUG nova.network.neutron [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 669.905668] env[61905]: DEBUG oslo_vmware.api [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095955} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 669.905932] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 669.906128] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 669.906303] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 669.906479] env[61905]: INFO nova.compute.manager [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Took 1.07 seconds to destroy the instance on the hypervisor.
[ 669.906709] env[61905]: DEBUG oslo.service.loopingcall [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 669.906890] env[61905]: DEBUG nova.compute.manager [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 669.906983] env[61905]: DEBUG nova.network.neutron [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 669.930224] env[61905]: DEBUG nova.network.neutron [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 670.256167] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Releasing lock "refresh_cache-b92a6db6-c51a-45c8-9792-d394027bcb7c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 670.256167] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 670.256167] env[61905]: DEBUG nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 670.256167] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 670.274696] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 670.340745] env[61905]: DEBUG oslo_concurrency.lockutils [req-57a361f5-6ac7-4dce-a4a2-bb97bbb4aa5c req-7da50033-9516-4196-a4f7-da8be7439be8 service nova] Releasing lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 670.341149] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquired lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 670.341333] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 670.432856] env[61905]: DEBUG nova.network.neutron [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 670.476377] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15be8ff-6d50-4963-9f5c-b32678e8a14f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 670.484250] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2939d3-f5f1-4803-bf73-fd4901fe4306 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 670.516589] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6ccf41-9f0e-44d9-b06d-f62b11cb7b62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 670.524452] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9954544d-10cc-458d-a811-fdcb161c9f74 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 670.539216] env[61905]: DEBUG nova.compute.provider_tree [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 670.778374] env[61905]: DEBUG nova.network.neutron [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 670.862705] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 670.933103] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 670.935580] env[61905]: INFO nova.compute.manager [-] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Took 1.03 seconds to deallocate network for instance.
[ 671.042721] env[61905]: DEBUG nova.scheduler.client.report [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 671.214325] env[61905]: DEBUG nova.compute.manager [req-37317e75-360a-40a2-b4e4-a172aaf5ddb6 req-1f447233-fc43-452d-969e-7c37157c3a24 service nova] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Received event network-vif-deleted-96ccd575-825e-42dc-8ec0-df5e3468eaca {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 671.281697] env[61905]: INFO nova.compute.manager [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: b92a6db6-c51a-45c8-9792-d394027bcb7c] Took 1.03 seconds to deallocate network for instance.
[ 671.435524] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Releasing lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.436099] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 671.436307] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 671.436631] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da535ec1-ac2c-486f-82ce-336269fccea2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.444251] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.447176] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d4a4ba-d8f2-4aef-a0de-c1d33eee226a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.471811] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ded96da7-74a4-4364-8424-22000411f5fe could not be found. [ 671.471811] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.471811] env[61905]: INFO nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Took 0.03 seconds to destroy the instance on the hypervisor. [ 671.471811] env[61905]: DEBUG oslo.service.loopingcall [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.471811] env[61905]: DEBUG nova.compute.manager [-] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 671.471811] env[61905]: DEBUG nova.network.neutron [-] [instance: ded96da7-74a4-4364-8424-22000411f5fe] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.489457] env[61905]: DEBUG nova.network.neutron [-] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.551134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.551642] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 671.554153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.108s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.555880] env[61905]: INFO nova.compute.claims [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.992221] env[61905]: DEBUG nova.network.neutron [-] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.060468] env[61905]: DEBUG nova.compute.utils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 672.063618] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 672.064091] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 672.277096] env[61905]: DEBUG nova.policy [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '127aec483d144a068c156781a8ac4f2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6004364faeec4f4b8327b4962a864f74', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 672.328292] env[61905]: INFO nova.scheduler.client.report [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Deleted allocations for instance b92a6db6-c51a-45c8-9792-d394027bcb7c [ 672.495075] env[61905]: INFO nova.compute.manager [-] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Took 1.02 seconds to deallocate network for instance. [ 672.497514] env[61905]: DEBUG nova.compute.claims [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 672.497690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.564537] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 672.624131] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Successfully created port: fe190d5e-c458-4a01-994f-4b475d299d78 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.837894] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a61c777-a556-4f79-8744-840a051454a0 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "b92a6db6-c51a-45c8-9792-d394027bcb7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.600s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.982135] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36cf41e-1d2d-4608-8913-89c55b0e63d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.991020] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0e14f0-b987-44cf-a06b-9044d9dc925c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.026229] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67581d9e-f826-48bc-b7be-015092dd2fa6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.034171] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51abfb2-efae-42ac-866c-34a015f3e544 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.047389] env[61905]: DEBUG nova.compute.provider_tree [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.081377] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "8ef98f37-9059-4658-9679-fb50dc812eb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.081600] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "8ef98f37-9059-4658-9679-fb50dc812eb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.340868] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 
tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 673.550612] env[61905]: DEBUG nova.scheduler.client.report [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 673.582405] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 673.621380] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:14:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='eddbe4b2-0d1c-4f9d-8d59-cfc8c92a22e7',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1036389906',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 673.621587] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 673.621732] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.621907] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 673.622067] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 
tempest-MigrationsAdminTest-1651072923-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.622290] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 673.622419] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 673.622575] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 673.622792] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 673.623060] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 673.624098] env[61905]: DEBUG nova.virt.hardware [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 673.626694] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e967823-d254-4467-af71-a993c23dd359 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.638910] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c5e30d-3871-40db-b689-45dc1f82ccef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.652201] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "60e68738-a333-44b2-a1e8-0b3da728059e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.652201] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.674522] env[61905]: DEBUG nova.compute.manager [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Received event network-changed-fe190d5e-c458-4a01-994f-4b475d299d78 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 673.674676] env[61905]: DEBUG nova.compute.manager [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Refreshing instance network info cache due to event network-changed-fe190d5e-c458-4a01-994f-4b475d299d78. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 673.674885] env[61905]: DEBUG oslo_concurrency.lockutils [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] Acquiring lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.675033] env[61905]: DEBUG oslo_concurrency.lockutils [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] Acquired lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.675194] env[61905]: DEBUG nova.network.neutron [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Refreshing network info cache for port fe190d5e-c458-4a01-994f-4b475d299d78 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.759948] env[61905]: ERROR nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. 
[ 673.759948] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.759948] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.759948] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.759948] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.759948] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.759948] env[61905]: ERROR nova.compute.manager raise self.value [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.759948] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 673.759948] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.759948] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 673.760559] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.760559] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 673.760559] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. 
[ 673.760559] env[61905]: ERROR nova.compute.manager [ 673.760559] env[61905]: Traceback (most recent call last): [ 673.760559] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 673.760559] env[61905]: listener.cb(fileno) [ 673.760559] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.760559] env[61905]: result = function(*args, **kwargs) [ 673.760559] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 673.760559] env[61905]: return func(*args, **kwargs) [ 673.760559] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.760559] env[61905]: raise e [ 673.760559] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.760559] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 673.760559] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.760559] env[61905]: created_port_ids = self._update_ports_for_instance( [ 673.760559] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.760559] env[61905]: with excutils.save_and_reraise_exception(): [ 673.760559] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.760559] env[61905]: self.force_reraise() [ 673.760559] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.760559] env[61905]: raise self.value [ 673.760559] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.760559] env[61905]: updated_port = self._update_port( [ 673.760559] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.760559] env[61905]: _ensure_no_port_binding_failure(port) [ 673.760559] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.760559] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 673.761482] env[61905]: nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. [ 673.761482] env[61905]: Removing descriptor: 17 [ 673.761482] env[61905]: ERROR nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. 
[ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Traceback (most recent call last): [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] yield resources [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.driver.spawn(context, instance, image_meta, [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 673.761482] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] vm_ref = self.build_virtual_machine(instance, [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] vif_infos = vmwarevif.get_vif_info(self._session, [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] for vif in network_info: [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self._sync_wrapper(fn, *args, **kwargs) [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.wait() [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self[:] = self._gt.wait() [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self._exit_event.wait() [ 673.761810] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.762159] env[61905]: ERROR 
nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] result = hub.switch() [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self.greenlet.switch() [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] result = function(*args, **kwargs) [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return func(*args, **kwargs) [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise e [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] nwinfo = self.network_api.allocate_for_instance( [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 673.762159] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] created_port_ids = self._update_ports_for_instance( [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] with excutils.save_and_reraise_exception(): [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.force_reraise() [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise self.value [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] updated_port = self._update_port( [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.762516] 
env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] _ensure_no_port_binding_failure(port) [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.762516] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise exception.PortBindingFailed(port_id=port['id']) [ 673.762870] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. [ 673.762870] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] [ 673.762870] env[61905]: INFO nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Terminating instance [ 673.766597] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.865636] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.062016] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.063080] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 674.066184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.900s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.067683] env[61905]: INFO nova.compute.claims [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.146892] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "a4a03b8a-3206-4684-9d85-0e60ac643175" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.148299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.199891] env[61905]: DEBUG nova.network.neutron [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.267789] env[61905]: DEBUG nova.network.neutron [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.575020] env[61905]: DEBUG nova.compute.utils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 674.576538] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 674.576888] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 674.630475] env[61905]: DEBUG nova.policy [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 674.774187] env[61905]: DEBUG oslo_concurrency.lockutils [req-10393964-4cce-4570-b50b-fc6964812ef7 req-aeca77b1-f633-4f25-8319-c813bf3afa32 service nova] Releasing lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.774187] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquired lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.774187] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.951321] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Successfully created port: d7b61672-7a84-4801-ab2e-099decf0c67f {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.082774] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 675.291809] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 675.384223] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.560682] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112bd359-1b4a-4553-8edc-07f608fd908d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.567866] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ecbe4b-e054-40f4-b50d-0c2dc62559d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.600615] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0cef77-0f84-4e5d-966e-66f7716796e3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.607818] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3fed44-2a30-41c0-8647-29dd7e633f61 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.623758] env[61905]: DEBUG nova.compute.provider_tree [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.850496] env[61905]: DEBUG nova.compute.manager [req-ee854be9-136c-44ba-809b-cae97bd74672 req-1eb6a60a-1a51-4f90-ac67-2b527c87c8bc service nova] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Received event network-vif-deleted-fe190d5e-c458-4a01-994f-4b475d299d78 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 675.893020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Releasing lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.893020] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 675.893020] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 675.893020] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1af8ca94-7f42-4e00-8bea-9524543d4a07 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.901897] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fb0fc5-dcfb-45ec-a63a-fa07c1dd5ce0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.926435] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434 could not be found. [ 675.926435] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.926435] env[61905]: INFO nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Took 0.03 seconds to destroy the instance on the hypervisor. [ 675.926435] env[61905]: DEBUG oslo.service.loopingcall [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 675.926679] env[61905]: DEBUG nova.compute.manager [-] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 675.926679] env[61905]: DEBUG nova.network.neutron [-] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.947748] env[61905]: DEBUG nova.network.neutron [-] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.104419] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 676.128137] env[61905]: DEBUG nova.scheduler.client.report [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.137457] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.137699] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.137852] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.138168] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.138338] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.138488] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.138691] env[61905]: DEBUG nova.virt.hardware [None 
req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.138867] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.139049] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.139214] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.139382] env[61905]: DEBUG nova.virt.hardware [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.140262] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05a3cb0-9f33-49a7-ae67-382c457b3064 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.148400] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8368d361-16fd-4d90-8ce5-11562fc0fe75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.258985] env[61905]: ERROR nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. 
[ 676.258985] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.258985] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.258985] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.258985] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.258985] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.258985] env[61905]: ERROR nova.compute.manager raise self.value [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.258985] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 676.258985] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.258985] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 676.259425] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.259425] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 676.259425] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. 
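This traceback (and the eventlet-level copy of it that follows) bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294, which converts a failed Neutron binding into the PortBindingFailed raised here. A minimal Python sketch of that guard, for orientation only: the exception and function names come from the frames above, but the 'binding:vif_type' == 'binding_failed' test is an assumption about how Neutron flags a port whose binding could not be completed, not a copy of the Nova source.

    class PortBindingFailed(Exception):
        # Mirrors the message format visible in the log above.
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumed check: Neutron reports a failed binding via a sentinel
        # vif_type on the port dict; Nova escalates it to a hard error.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])
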
[ 676.259425] env[61905]: ERROR nova.compute.manager [ 676.259425] env[61905]: Traceback (most recent call last): [ 676.259425] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 676.259425] env[61905]: listener.cb(fileno) [ 676.259425] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.259425] env[61905]: result = function(*args, **kwargs) [ 676.259425] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 676.259425] env[61905]: return func(*args, **kwargs) [ 676.259425] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.259425] env[61905]: raise e [ 676.259425] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.259425] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 676.259425] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.259425] env[61905]: created_port_ids = self._update_ports_for_instance( [ 676.259425] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.259425] env[61905]: with excutils.save_and_reraise_exception(): [ 676.259425] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.259425] env[61905]: self.force_reraise() [ 676.259425] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.259425] env[61905]: raise self.value [ 676.259425] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.259425] env[61905]: updated_port = self._update_port( [ 676.259425] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.259425] env[61905]: _ensure_no_port_binding_failure(port) [ 676.259425] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.259425] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 676.260133] env[61905]: nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. [ 676.260133] env[61905]: Removing descriptor: 17 [ 676.260133] env[61905]: ERROR nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. 
[ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Traceback (most recent call last): [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] yield resources [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] self.driver.spawn(context, instance, image_meta, [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] self._vmops.spawn(context, instance, image_meta, injected_files, [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 676.260133] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] vm_ref = self.build_virtual_machine(instance, [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] vif_infos = vmwarevif.get_vif_info(self._session, [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] for vif in network_info: [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] return self._sync_wrapper(fn, *args, **kwargs) [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] self.wait() [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] self[:] = self._gt.wait() [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] return self._exit_event.wait() [ 676.260528] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 676.260854] env[61905]: ERROR 
nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] result = hub.switch() [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] return self.greenlet.switch() [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] result = function(*args, **kwargs) [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] return func(*args, **kwargs) [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] raise e [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] nwinfo = self.network_api.allocate_for_instance( [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 676.260854] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] created_port_ids = self._update_ports_for_instance( [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] with excutils.save_and_reraise_exception(): [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] self.force_reraise() [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] raise self.value [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] updated_port = self._update_port( [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.261197] 
env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] _ensure_no_port_binding_failure(port) [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.261197] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] raise exception.PortBindingFailed(port_id=port['id']) [ 676.261540] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. [ 676.261540] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] [ 676.261540] env[61905]: INFO nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Terminating instance [ 676.263517] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.264106] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.264106] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 676.449202] env[61905]: DEBUG nova.network.neutron [-] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.561385] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "d31570f0-7662-4e13-9dee-51dc66728acc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.561612] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.636299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 
tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.636840] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 676.639419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.618s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.785413] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 676.864343] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.953270] env[61905]: INFO nova.compute.manager [-] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Took 1.03 seconds to deallocate network for instance. [ 676.955730] env[61905]: DEBUG nova.compute.claims [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 676.955933] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.144863] env[61905]: DEBUG nova.compute.utils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 677.149534] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 677.149534] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.194045] env[61905]: DEBUG nova.policy [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ca67104cdbd4ac9be9a57bb19846925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7af9072624d04f669e8183581e6ca50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 677.367328] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.367588] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 677.367889] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 677.368256] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d74af471-a2b6-4fbc-83fe-fda7b2034c80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.377497] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e022bbc9-ff9c-41ee-a2a0-ef06d074afae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.404437] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2a778ae5-37be-4479-b7ff-4468d0433c86 could not be found. 
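The destroy path here (for 2a778ae5-37be-4479-b7ff-4468d0433c86, and for 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434 earlier) tolerates a missing backend VM: vmops looks the guest up by UUID, catches InstanceNotFound when the VM was never actually created because spawn failed first, logs the WARNING seen above, and proceeds as if the destroy succeeded so that network deallocation and claim cleanup still run. A rough sketch of that pattern, assuming hypothetical lookup_vm/delete_vm helpers rather than the real vmwareapi session calls:

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy(instance_uuid, lookup_vm, delete_vm):
        # Tear down a VM, treating "already gone" as success so the
        # caller can continue deallocating networks and aborting claims.
        try:
            vm_ref = lookup_vm(instance_uuid)  # cf. SearchIndex.FindAllByUuid
            delete_vm(vm_ref)
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s",
                        instance_uuid)
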
[ 677.404668] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.404858] env[61905]: INFO nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Took 0.04 seconds to destroy the instance on the hypervisor. [ 677.405286] env[61905]: DEBUG oslo.service.loopingcall [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.405677] env[61905]: DEBUG nova.compute.manager [-] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 677.405677] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 677.439511] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 677.530724] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Successfully created port: 26e8448d-f7c7-404a-901b-ac6478cb284d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.615322] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178fe4e3-457a-447e-bc1d-7b2e38fd0a47 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.623018] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4253e31f-2f7b-46cb-9cb9-3873d30ace1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.655416] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 677.660264] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccaa741-ca77-4ed7-a296-bfa0a2b0771b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.668366] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d5786a-8945-4201-b888-d95f8a559b60 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.682624] env[61905]: DEBUG nova.compute.provider_tree [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.876795] env[61905]: DEBUG nova.compute.manager [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Received event network-changed-d7b61672-7a84-4801-ab2e-099decf0c67f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.876979] env[61905]: DEBUG nova.compute.manager [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Refreshing instance network info cache due to event network-changed-d7b61672-7a84-4801-ab2e-099decf0c67f. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 677.877205] env[61905]: DEBUG oslo_concurrency.lockutils [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] Acquiring lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.877343] env[61905]: DEBUG oslo_concurrency.lockutils [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] Acquired lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.877497] env[61905]: DEBUG nova.network.neutron [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Refreshing network info cache for port d7b61672-7a84-4801-ab2e-099decf0c67f {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 677.943967] env[61905]: DEBUG nova.network.neutron [-] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.185326] env[61905]: DEBUG nova.scheduler.client.report [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.401387] env[61905]: DEBUG nova.network.neutron [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.447607] env[61905]: INFO nova.compute.manager [-] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Took 1.04 seconds to deallocate network for instance. [ 678.451113] env[61905]: DEBUG nova.compute.claims [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 678.451318] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.547209] env[61905]: DEBUG nova.network.neutron [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.570253] env[61905]: ERROR nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. 
[ 678.570253] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.570253] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.570253] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.570253] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.570253] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.570253] env[61905]: ERROR nova.compute.manager raise self.value [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.570253] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 678.570253] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.570253] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 678.570680] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.570680] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 678.570680] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. 
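The summary line above ("failed network setup after 1 attempt(s)") together with the manager.py:1989/2011 frames shows the shape of the allocation wrapper: try the Neutron allocation a bounded number of times, log each failure, and re-raise the final error to the spawn path. The sketch below only illustrates that control flow under assumed names; attempts=1 matches the behaviour logged here, and the real function signature is not reproduced.

    import logging
    import time

    LOG = logging.getLogger(__name__)

    def allocate_network_with_retries(allocate, attempts=1, delay=1.0):
        # Call allocate(), retrying up to `attempts` times; the last
        # failure is re-raised (cf. the `raise e` frame at manager.py:2011).
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception as exc:
                LOG.error("Instance failed network setup after %d "
                          "attempt(s): %s", attempt, exc)
                if attempt == attempts:
                    raise
                time.sleep(delay)
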
[ 678.570680] env[61905]: ERROR nova.compute.manager [ 678.570680] env[61905]: Traceback (most recent call last): [ 678.570680] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 678.570680] env[61905]: listener.cb(fileno) [ 678.570680] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.570680] env[61905]: result = function(*args, **kwargs) [ 678.570680] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.570680] env[61905]: return func(*args, **kwargs) [ 678.570680] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.570680] env[61905]: raise e [ 678.570680] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.570680] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 678.570680] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.570680] env[61905]: created_port_ids = self._update_ports_for_instance( [ 678.570680] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.570680] env[61905]: with excutils.save_and_reraise_exception(): [ 678.570680] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.570680] env[61905]: self.force_reraise() [ 678.570680] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.570680] env[61905]: raise self.value [ 678.570680] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.570680] env[61905]: updated_port = self._update_port( [ 678.570680] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.570680] env[61905]: _ensure_no_port_binding_failure(port) [ 678.570680] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.570680] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 678.571524] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. [ 678.571524] env[61905]: Removing descriptor: 17 [ 678.673634] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 678.691308] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.691905] env[61905]: ERROR nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Traceback (most recent call last): [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.driver.spawn(context, instance, image_meta, [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] vm_ref = self.build_virtual_machine(instance, [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.691905] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] for vif in network_info: [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return self._sync_wrapper(fn, *args, **kwargs) [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.wait() [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 678.692242] env[61905]: ERROR 
nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self[:] = self._gt.wait() [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return self._exit_event.wait() [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] current.throw(*self._exc) [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.692242] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] result = function(*args, **kwargs) [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] return func(*args, **kwargs) [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise e [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] nwinfo = self.network_api.allocate_for_instance( [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] created_port_ids = self._update_ports_for_instance( [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] with excutils.save_and_reraise_exception(): [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] self.force_reraise() [ 678.692559] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise self.value [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] updated_port = self._update_port( [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] _ensure_no_port_binding_failure(port) [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] raise exception.PortBindingFailed(port_id=port['id']) [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] nova.exception.PortBindingFailed: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. [ 678.692912] env[61905]: ERROR nova.compute.manager [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] [ 678.692912] env[61905]: DEBUG nova.compute.utils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 678.693834] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.691s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.698441] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Build of instance 362c9148-9f78-4700-9c6f-7fd0eaef4bd7 was re-scheduled: Binding failed for port 9c2a282d-ec0f-4dd6-8aac-693e4551b443, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 678.698889] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 678.699131] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquiring lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.699277] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Acquired lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.699439] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 678.707134] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 678.707364] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 678.707517] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.707692] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 678.707836] 
env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.707977] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 678.708231] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 678.708693] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 678.708693] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 678.708797] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 678.708916] env[61905]: DEBUG nova.virt.hardware [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.709846] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1c694b-d91d-41b0-8322-8a8e1d44a686 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.718567] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c477ace-011e-4067-80f7-73cd18416c8a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.733354] env[61905]: ERROR nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. 
[ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Traceback (most recent call last): [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] yield resources [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] self.driver.spawn(context, instance, image_meta, [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] self._vmops.spawn(context, instance, image_meta, injected_files, [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] vm_ref = self.build_virtual_machine(instance, [ 678.733354] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] vif_infos = vmwarevif.get_vif_info(self._session, [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] for vif in network_info: [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] return self._sync_wrapper(fn, *args, **kwargs) [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] self.wait() [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] self[:] = self._gt.wait() [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] return self._exit_event.wait() [ 678.733770] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 678.733770] env[61905]: ERROR 
nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] current.throw(*self._exc) [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] result = function(*args, **kwargs) [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] return func(*args, **kwargs) [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] raise e [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] nwinfo = self.network_api.allocate_for_instance( [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] created_port_ids = self._update_ports_for_instance( [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] with excutils.save_and_reraise_exception(): [ 678.734275] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] self.force_reraise() [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] raise self.value [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] updated_port = self._update_port( [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] _ensure_no_port_binding_failure(port) [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] raise exception.PortBindingFailed(port_id=port['id']) [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. [ 678.734718] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] [ 678.734718] env[61905]: INFO nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Terminating instance [ 678.735648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.735807] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.735968] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.054574] env[61905]: DEBUG oslo_concurrency.lockutils [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] Releasing lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.054855] env[61905]: DEBUG nova.compute.manager [req-71627632-a90d-41ab-84a7-b67406b82fa5 req-950e40fb-9345-405f-b687-cb74baafe0b1 service nova] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Received event network-vif-deleted-d7b61672-7a84-4801-ab2e-099decf0c67f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.225442] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.263589] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 679.362182] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.382829] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.618087] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf2feba-5e32-4baf-8449-775f53777aff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.626852] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82252687-fe6c-4bc9-bdec-bdb0cb6b66d3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.659568] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee00a77e-7e82-45cd-ad62-b52557dad39c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.667667] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fdf60c-d254-4338-958b-2500c8ba6c99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.681419] env[61905]: DEBUG nova.compute.provider_tree [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.864487] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Releasing lock "refresh_cache-362c9148-9f78-4700-9c6f-7fd0eaef4bd7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.864757] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 679.864952] env[61905]: DEBUG nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.865143] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.885533] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.885929] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.886135] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.886431] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61f13dff-5440-47dd-a095-073ff30260ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.895654] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621b58bd-6e51-4bd4-8d84-568b99606ec1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.916930] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03f9b48c-4bd1-4018-b34f-267e1575c753 could not be found. [ 679.917182] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.917363] env[61905]: INFO nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Took 0.03 seconds to destroy the instance on the hypervisor. 
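
The repeated PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): after Neutron returns the updated port, Nova inspects the reported binding state and raises if binding failed. Below is a minimal sketch of that check, reconstructed from the traceback rather than quoted from the Nova source; the constant name VIF_TYPE_BINDING_FAILED and the simplified exception class are illustrative:

    # Neutron marks a port it could not bind by reporting the vif_type
    # 'binding_failed' on the port dict; Nova converts that marker into the
    # PortBindingFailed exception seen in the log, which aborts the spawn
    # and triggers the terminate/deallocate sequence recorded above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant value

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'binding:vif_type' is the standard Neutron port attribute.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

Because the check runs inside _update_ports_for_instance during the asynchronous network allocation, the failure only surfaces when the spawn path first iterates network_info, which is why the traceback enters through nova/virt/vmwareapi/vif.py and the _sync_wrapper in nova/network/model.py rather than at the Neutron call site.
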
[ 679.917635] env[61905]: DEBUG oslo.service.loopingcall [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.917861] env[61905]: DEBUG nova.compute.manager [-] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.917956] env[61905]: DEBUG nova.network.neutron [-] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 680.036276] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.040667] env[61905]: DEBUG nova.network.neutron [-] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.050943] env[61905]: DEBUG nova.compute.manager [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Received event network-changed-26e8448d-f7c7-404a-901b-ac6478cb284d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.051056] env[61905]: DEBUG nova.compute.manager [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Refreshing instance network info cache due to event network-changed-26e8448d-f7c7-404a-901b-ac6478cb284d.
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 680.051260] env[61905]: DEBUG oslo_concurrency.lockutils [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] Acquiring lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.051413] env[61905]: DEBUG oslo_concurrency.lockutils [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] Acquired lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.051549] env[61905]: DEBUG nova.network.neutron [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Refreshing network info cache for port 26e8448d-f7c7-404a-901b-ac6478cb284d {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.185142] env[61905]: DEBUG nova.scheduler.client.report [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.539802] env[61905]: DEBUG nova.network.neutron [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.542638] env[61905]: DEBUG nova.network.neutron [-] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.571038] env[61905]: DEBUG nova.network.neutron [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.649389] env[61905]: DEBUG nova.network.neutron [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.690069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.690738] env[61905]: ERROR nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Traceback (most recent call last): [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.driver.spawn(context, instance, image_meta, [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] vm_ref = self.build_virtual_machine(instance, [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.690738] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] for vif in network_info: [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self._sync_wrapper(fn, *args, **kwargs) [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.wait() [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self[:] = self._gt.wait() [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self._exit_event.wait() [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] result = hub.switch() [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 680.691059] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return self.greenlet.switch() [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] result = function(*args, **kwargs) [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] return func(*args, **kwargs) [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise e [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] nwinfo = self.network_api.allocate_for_instance( [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] created_port_ids = self._update_ports_for_instance( [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] with excutils.save_and_reraise_exception(): [ 680.691369] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] self.force_reraise() [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise self.value [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] updated_port = self._update_port( [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] _ensure_no_port_binding_failure(port) [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] raise exception.PortBindingFailed(port_id=port['id']) [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] nova.exception.PortBindingFailed: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. [ 680.691682] env[61905]: ERROR nova.compute.manager [instance: 105aed8e-4268-4553-9564-1540cb5176dc] [ 680.691949] env[61905]: DEBUG nova.compute.utils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 680.692574] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.810s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.697061] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Build of instance 105aed8e-4268-4553-9564-1540cb5176dc was re-scheduled: Binding failed for port e5f5ee6b-5e6e-4682-904c-ed2c750bd3c3, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 680.697491] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 680.697719] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquiring lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.697863] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Acquired lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.698026] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.043786] env[61905]: INFO nova.compute.manager [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] [instance: 362c9148-9f78-4700-9c6f-7fd0eaef4bd7] Took 1.18 seconds to deallocate network for instance. [ 681.046622] env[61905]: INFO nova.compute.manager [-] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Took 1.13 seconds to deallocate network for instance. 
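
The claim abort that follows runs under the resource tracker's "compute_resources" lock; every "Acquiring lock", "acquired ... :: waited" and "released ... :: held" line in this log is emitted by the inner wrapper of oslo.concurrency's synchronized decorator (the lockutils.py:402/407/421 call sites cited throughout). A minimal sketch of the pattern, assuming the decorator is applied directly rather than through Nova's own synchronized helper; the function body is a placeholder:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(context, instance, nodename):
        # All claim and abort paths serialize on this one named semaphore,
        # so when many builds fail at once (as in this tempest run) each
        # caller can report a long ":: waited" time, e.g. the 15.810s at
        # 680.692574 above, while the critical section itself holds the
        # lock for only a couple of seconds.
        ...

The waited/held figures are measured by that wrapper: the gap between requesting and acquiring the semaphore, and the time from acquisition to release.
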
[ 681.048869] env[61905]: DEBUG nova.compute.claims [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 681.048869] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.152735] env[61905]: DEBUG oslo_concurrency.lockutils [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] Releasing lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.152992] env[61905]: DEBUG nova.compute.manager [req-c2568c29-ff20-4708-8718-31eb35ec2e96 req-428d26b2-bfa7-4d02-8e21-b6df0a1726a1 service nova] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Received event network-vif-deleted-26e8448d-f7c7-404a-901b-ac6478cb284d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 681.217866] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.320970] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.562663] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae232a13-92fc-4b38-9b91-2518688f8391 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.570382] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15da9f27-d9ad-4c51-9f31-e0104a69e636 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.602986] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fa8347-baf0-4389-973f-dd2f4c19a1a4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.609268] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10aaeec9-b872-40ec-bea9-0bc8de732a77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.623050] env[61905]: DEBUG nova.compute.provider_tree [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed in ProviderTree for provider: 
9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.823982] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Releasing lock "refresh_cache-105aed8e-4268-4553-9564-1540cb5176dc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.824307] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 681.824494] env[61905]: DEBUG nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 681.824738] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.840317] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.081140] env[61905]: INFO nova.scheduler.client.report [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Deleted allocations for instance 362c9148-9f78-4700-9c6f-7fd0eaef4bd7 [ 682.126562] env[61905]: DEBUG nova.scheduler.client.report [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 682.343421] env[61905]: DEBUG nova.network.neutron [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.589488] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60ef3272-53a4-4f19-831e-2eaf2a31c0b6 tempest-ServersTestBootFromVolume-1731284574 tempest-ServersTestBootFromVolume-1731284574-project-member] Lock "362c9148-9f78-4700-9c6f-7fd0eaef4bd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 125.169s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.636214] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.943s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.636931] env[61905]: ERROR nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information.
[ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Traceback (most recent call last): [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] self.driver.spawn(context, instance, image_meta, [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] vm_ref = self.build_virtual_machine(instance, [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] vif_infos = vmwarevif.get_vif_info(self._session, [ 682.636931] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] for vif in network_info: [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] return self._sync_wrapper(fn, *args, **kwargs) [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] self.wait() [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] self[:] = self._gt.wait() [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] return self._exit_event.wait() [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] result = hub.switch() [ 682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
682.637381] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] return self.greenlet.switch() [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] result = function(*args, **kwargs) [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] return func(*args, **kwargs) [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] raise e [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] nwinfo = self.network_api.allocate_for_instance( [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] created_port_ids = self._update_ports_for_instance( [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] with excutils.save_and_reraise_exception(): [ 682.637805] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] self.force_reraise() [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] raise self.value [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] updated_port = self._update_port( [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] _ensure_no_port_binding_failure(port) [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] raise exception.PortBindingFailed(port_id=port['id']) [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] nova.exception.PortBindingFailed: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information. [ 682.638159] env[61905]: ERROR nova.compute.manager [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] [ 682.638446] env[61905]: DEBUG nova.compute.utils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 682.639121] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.864s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.640575] env[61905]: INFO nova.compute.claims [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.643908] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Build of instance 60f2eb1d-de4c-4318-98c3-eb2d411c120b was re-scheduled: Binding failed for port 809c13b0-2e13-4056-ab7e-e2319e3d2000, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 682.643908] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 682.644071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.644071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.644229] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.848327] env[61905]: INFO nova.compute.manager [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] [instance: 105aed8e-4268-4553-9564-1540cb5176dc] Took 1.02 seconds to deallocate network for instance. [ 683.093319] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 683.174987] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.267077] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.617077] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.770060] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Releasing lock "refresh_cache-60f2eb1d-de4c-4318-98c3-eb2d411c120b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.770296] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 683.770479] env[61905]: DEBUG nova.compute.manager [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 683.770669] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 683.794519] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.886075] env[61905]: INFO nova.scheduler.client.report [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Deleted allocations for instance 105aed8e-4268-4553-9564-1540cb5176dc [ 684.105934] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db8167c-07a1-40f3-a019-72e5e3ff6d19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.113497] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75414c7-e96d-4a82-97cd-33ba59bc1dcc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.148117] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef96822-5da9-4c43-851c-c88a97540cae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.156305] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38414450-a288-42dd-bc85-3440363b13ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.169545] env[61905]: DEBUG nova.compute.provider_tree [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.292284] env[61905]: DEBUG nova.network.neutron [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.395796] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d4bc97c-2384-4e81-9170-33afd234dc83 tempest-AttachInterfacesV270Test-2104108953 tempest-AttachInterfacesV270Test-2104108953-project-member] Lock "105aed8e-4268-4553-9564-1540cb5176dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 124.744s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.672227] env[61905]: DEBUG nova.scheduler.client.report [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 684.795352] env[61905]: INFO nova.compute.manager [None
req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: 60f2eb1d-de4c-4318-98c3-eb2d411c120b] Took 1.02 seconds to deallocate network for instance. [ 684.899981] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 685.180023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.180023] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 685.182639] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.230s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.182697] env[61905]: DEBUG nova.objects.instance [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 685.429437] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.689022] env[61905]: DEBUG nova.compute.utils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 685.698773] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Not allocating networking since 'none' was specified. 
[ 685.833139] env[61905]: INFO nova.scheduler.client.report [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Deleted allocations for instance 60f2eb1d-de4c-4318-98c3-eb2d411c120b
[ 686.196516] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 686.200119] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4fa0f94c-f2ea-4beb-a198-8db2313101f8 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 686.203046] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.951s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 686.203046] env[61905]: INFO nova.compute.claims [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 686.348485] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1603e8df-c68f-4a14-8559-de199686fed5 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "60f2eb1d-de4c-4318-98c3-eb2d411c120b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.873s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 686.851541] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 687.209151] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
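Editor's note: the scheduler report-client records keep repeating the provider's inventory. The schedulable capacity implied by that dict is (total - reserved) × allocation_ratio per resource class; a worked example using exactly the figures logged above:

```python
# Effective capacity implied by the inventory dict in the log records above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```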
[ 687.252441] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 687.252666] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 687.253639] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 687.253956] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 687.254096] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 687.254247] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 687.254597] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 687.254875] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 687.254988] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 687.255146] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 687.255317] env[61905]: DEBUG nova.virt.hardware [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 687.256288] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbfee3a3-4dde-4a0e-a52e-61998945d1dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.273576] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dec2cc3-c718-4c36-b950-a342ad6cff52 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.290182] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 687.294450] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Creating folder: Project (fe45fb723738464cb7a1d7ee4372cfcf). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 687.296636] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e67fae9-28a1-43ab-bb52-faeb398e2cbe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.308529] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Created folder: Project (fe45fb723738464cb7a1d7ee4372cfcf) in parent group-v289968.
[ 687.308743] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Creating folder: Instances. Parent ref: group-v289990. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
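Editor's note: the nova.virt.hardware records above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single (1,1,1) result. A simplified re-implementation of that enumeration (not nova's exact code) shows why one vCPU under 65536-wide limits admits exactly one topology:

```python
# Simplified sketch of enumerating sockets/cores/threads splits for a vCPU
# count under the limits logged above; nova's real logic also orders results
# by preference, which is omitted here.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- the single topology in the log
```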
[ 687.309044] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1dbf275-3b48-4e92-b6d1-8f0bbd9ebc49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.318614] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Created folder: Instances in parent group-v289990.
[ 687.319344] env[61905]: DEBUG oslo.service.loopingcall [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 687.319598] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 687.320059] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9f93794-ff80-4a2a-86cf-54c21129d002 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.341238] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 687.341238] env[61905]: value = "task-1362344"
[ 687.341238] env[61905]: _type = "Task"
[ 687.341238] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 687.349519] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362344, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
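Editor's note: the "Invoking Folder.CreateVM_Task", "Waiting for the task", and "_poll_task ... progress is 0%" records above are oslo.vmware's invoke/wait pattern. A hedged sketch under assumed inputs (all morefs, the host, and the config spec below are placeholders; verify VMwareAPISession's constructor arguments against your oslo.vmware version):

```python
# Hedged sketch of the oslo.vmware call pattern visible in the records above.
from oslo_vmware import api

def create_vm(host, user, pwd, folder_ref, config_spec, respool_ref):
    session = api.VMwareAPISession(host, user, pwd,
                                   api_retry_count=10, task_poll_interval=0.5)
    # invoke_api() issues the SOAP call; *_Task methods return a task moref.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    # wait_for_task() polls the task (the "progress is N%." lines) and returns
    # its result once vCenter reports success.
    return session.wait_for_task(task)
```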
[ 687.381713] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 687.722237] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "fed05097-de84-4617-bf9e-7fc116ebc56e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 687.723497] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 687.758170] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a14ffd4-6f0b-40e9-8ad0-5c073281f7b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.768765] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3808567-5f83-4da3-890d-7e6b8739ce63 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.804927] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f84709-11db-4ae6-b6d6-11743f2d5952 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.815502] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c193a9-2893-4553-80f9-835afd5f9b40 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.829366] env[61905]: DEBUG nova.compute.provider_tree [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 687.851843] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362344, 'name': CreateVM_Task, 'duration_secs': 0.265186} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 687.852170] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 687.852761] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 687.852761] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 687.853191] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 687.853463] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a751bbe-6e19-4248-8ef7-4978d59c7d30 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 687.858040] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 687.858040] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f35ad7-a74d-4644-f249-8fc75bfe5ee3"
[ 687.858040] env[61905]: _type = "Task"
[ 687.858040] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 687.866876] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f35ad7-a74d-4644-f249-8fc75bfe5ee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
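Editor's note: the lock names above ("[datastore2] devstack-image-cache_base/<image id>") serialize access to one cached image per datastore, so only one build fetches or copies the VMDK while others wait. A conceptual sketch of that pattern (not nova's exact code; the two callables are placeholders supplied by the caller):

```python
# Conceptual sketch of per-image cache locking, mirroring the lock names above.
from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id, already_cached, fetch):
    # One lock per "[datastore] cache/<image-id>": concurrent builds of the
    # same image serialize, while different images proceed in parallel.
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(lock_name):
        if not already_cached(datastore, image_id):
            fetch(datastore, image_id)   # first holder populates the cache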
[ 688.334381] env[61905]: DEBUG nova.scheduler.client.report [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 688.369840] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f35ad7-a74d-4644-f249-8fc75bfe5ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.009537} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 688.369967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 688.370395] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 688.370395] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 688.370528] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 688.370691] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 688.370947] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ba35aa0-d4ef-4fe2-846d-f1eb7a1360b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.380080] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 688.380080] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 688.380523] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62237115-d3ce-453f-8ac2-5543adfae63c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.390254] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 688.390254] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e3472f-08df-dfc8-3f16-c97cbb68023d"
[ 688.390254] env[61905]: _type = "Task"
[ 688.390254] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 688.399677] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e3472f-08df-dfc8-3f16-c97cbb68023d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 688.837512] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 688.838232] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 688.840521] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 688.843017] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 688.843793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.400s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 688.844145] env[61905]: DEBUG nova.objects.instance [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lazy-loading 'resources' on Instance uuid e6a063b4-d4f8-46ae-89ae-2d66637896ae {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 688.900263] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e3472f-08df-dfc8-3f16-c97cbb68023d, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 688.902664] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cba58cd6-8b75-47f9-af66-a03c0a0ac0ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 688.906607] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 688.906607] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290fe33-116c-02b1-11b2-1bf04b33e0f3"
[ 688.906607] env[61905]: _type = "Task"
[ 688.906607] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 688.915344] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290fe33-116c-02b1-11b2-1bf04b33e0f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 689.348155] env[61905]: DEBUG nova.compute.utils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 689.349629] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 689.349793] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 689.418254] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290fe33-116c-02b1-11b2-1bf04b33e0f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 689.422243] env[61905]: DEBUG nova.policy [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ac5703119f64e9c9adbf5773bdbef7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e72eb02f21dc4ef19aac3c79dfc6f0b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 689.423614] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 689.423868] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 84428003-72b1-467a-baf5-06ac37205622/84428003-72b1-467a-baf5-06ac37205622.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 689.425381] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f47537a5-e206-424e-98a1-7617259a692c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.436674] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 689.436674] env[61905]: value = "task-1362345"
[ 689.436674] env[61905]: _type = "Task"
[ 689.436674] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 689.449246] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
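Editor's note: the nova.policy record above shows an authorize() check for network:attach_external_network evaluating to "failed" for a reader/member token. A hedged sketch of the oslo.policy enforcement underneath; the 'role:admin' default registered here is my assumption for illustration, not nova's actual default rule:

```python
# Hedged sketch of an oslo.policy check like the one logged above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed default for illustration only; nova defines its own defaults.
enforcer.register_default(policy.RuleDefault('network:attach_external_network',
                                             'role:admin'))
creds = {'roles': ['reader', 'member'],
         'project_id': 'e72eb02f21dc4ef19aac3c79dfc6f0b7'}  # abbreviated from the log
# do_raise=False returns False instead of raising PolicyNotAuthorized,
# matching the log's "Policy check ... failed" phrasing.
allowed = enforcer.authorize('network:attach_external_network', {}, creds,
                             do_raise=False)
print(allowed)   # False for a member-only token
```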
[ 689.790232] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Successfully created port: 98c51200-19dc-47c3-a19a-f3e236dc3f45 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 689.859059] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 689.889978] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca00525-0ebe-409e-8b5e-9f08b4ef9e0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.901498] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f00dc8-d498-4b96-bf36-a0c4d7b2332a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.941570] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ac2c4a-4fa8-4e10-a70a-59e6d7ceffcf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.950042] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488481} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 689.953108] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 84428003-72b1-467a-baf5-06ac37205622/84428003-72b1-467a-baf5-06ac37205622.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 689.953365] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 689.956632] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ee20196-a261-4337-9d3a-1f26b7de6178 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.960517] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9120cae4-70c3-4f11-a0b4-9d96d203b01d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 689.968690] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 689.968690] env[61905]: value = "task-1362346"
[ 689.968690] env[61905]: _type = "Task"
[ 689.968690] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 689.976756] env[61905]: DEBUG nova.compute.provider_tree [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 690.482470] env[61905]: DEBUG nova.scheduler.client.report [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 690.488491] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080509} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
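Editor's note: the "Extending root virtual disk to 1048576" figure above is the flavor's root_gb converted to KB, which is the unit ExtendVirtualDisk works in here. The arithmetic, using the m1.nano flavor from the log:

```python
# 1 GiB root disk expressed in KB: 1 * 1024 * 1024 = 1048576, the value logged.
root_gb = 1                      # m1.nano's root_gb in the flavor record above
size_kb = root_gb * 1024 * 1024
print(size_kb)                   # 1048576
```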
[ 690.488958] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 690.489791] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6288ea2-b3e6-4ff0-a7db-e6a8cb4eb05c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 690.516467] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 84428003-72b1-467a-baf5-06ac37205622/84428003-72b1-467a-baf5-06ac37205622.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 690.517354] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fd24e8b-fdd6-4d2c-b5df-ef5d960c72eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 690.537534] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 690.537534] env[61905]: value = "task-1362347"
[ 690.537534] env[61905]: _type = "Task"
[ 690.537534] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 690.545767] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362347, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 690.872997] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 690.909070] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 690.909070] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 690.909070] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 690.909545] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 690.909545] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 690.909545] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 690.909751] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 690.909905] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 690.910138] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 690.910245] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 690.910415] env[61905]: DEBUG nova.virt.hardware [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 690.911633] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1d2027-997a-425f-8483-6851a71b1ec9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 690.922627] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a084d677-a7a4-474c-8d05-b77063d4c726 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 690.993130] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 690.993130] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.495s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 691.019080] env[61905]: INFO nova.scheduler.client.report [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted allocations for instance e6a063b4-d4f8-46ae-89ae-2d66637896ae
[ 691.047498] env[61905]: DEBUG nova.compute.manager [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Received event network-changed-98c51200-19dc-47c3-a19a-f3e236dc3f45 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 691.047688] env[61905]: DEBUG nova.compute.manager [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Refreshing instance network info cache due to event network-changed-98c51200-19dc-47c3-a19a-f3e236dc3f45. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 691.047900] env[61905]: DEBUG oslo_concurrency.lockutils [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] Acquiring lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 691.048048] env[61905]: DEBUG oslo_concurrency.lockutils [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] Acquired lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 691.048206] env[61905]: DEBUG nova.network.neutron [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Refreshing network info cache for port 98c51200-19dc-47c3-a19a-f3e236dc3f45 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 691.056753] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 691.199104] env[61905]: ERROR nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information.
[ 691.199104] env[61905]: ERROR nova.compute.manager Traceback (most recent call last):
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 691.199104] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance(
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 691.199104] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance(
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 691.199104] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception():
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 691.199104] env[61905]: ERROR nova.compute.manager self.force_reraise()
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 691.199104] env[61905]: ERROR nova.compute.manager raise self.value
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 691.199104] env[61905]: ERROR nova.compute.manager updated_port = self._update_port(
[ 691.199104] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 691.199104] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port)
[ 691.199660] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 691.199660] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id'])
[ 691.199660] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information.
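Editor's note: the force_reraise / "raise self.value" frames in the traceback above come from oslo.utils' save_and_reraise_exception context manager, which lets cleanup run on failure and then re-raises the original exception. A minimal usage sketch (the RuntimeError stands in for the real port-update failure):

```python
# Minimal usage of oslo.utils' save_and_reraise_exception, matching the
# __exit__ -> force_reraise() -> raise self.value frames in the traceback.
from oslo_utils import excutils

def update_port_with_cleanup():
    try:
        raise RuntimeError("binding failed")     # stand-in for _update_port()
    except Exception:
        with excutils.save_and_reraise_exception():
            print("cleanup runs here before the original exception is re-raised")

try:
    update_port_with_cleanup()
except RuntimeError as exc:
    print("caller still sees the original error:", exc)
```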
[ 691.199660] env[61905]: ERROR nova.compute.manager
[ 691.199660] env[61905]: Traceback (most recent call last):
[ 691.199660] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait
[ 691.199660] env[61905]: listener.cb(fileno)
[ 691.199660] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 691.199660] env[61905]: result = function(*args, **kwargs)
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 691.199660] env[61905]: return func(*args, **kwargs)
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 691.199660] env[61905]: raise e
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 691.199660] env[61905]: nwinfo = self.network_api.allocate_for_instance(
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 691.199660] env[61905]: created_port_ids = self._update_ports_for_instance(
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 691.199660] env[61905]: with excutils.save_and_reraise_exception():
[ 691.199660] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 691.199660] env[61905]: self.force_reraise()
[ 691.199660] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 691.199660] env[61905]: raise self.value
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 691.199660] env[61905]: updated_port = self._update_port(
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 691.199660] env[61905]: _ensure_no_port_binding_failure(port)
[ 691.199660] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 691.199660] env[61905]: raise exception.PortBindingFailed(port_id=port['id'])
[ 691.200524] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information.
[ 691.200524] env[61905]: Removing descriptor: 17
[ 691.200524] env[61905]: ERROR nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information.
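Editor's note: the second traceback (below) shows the same PortBindingFailed surfacing later, during spawn, because network allocation runs in a background greenthread and the exception only propagates when the build path first waits on the network_info result (the _sync_wrapper -> wait() frames). A conceptual analogue using stdlib futures rather than the eventlet greenthreads nova actually uses:

```python
# Conceptual analogue of the deferred failure: the allocation error is stored
# in the background task and re-raised only where the result is first awaited.
from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance():
    raise RuntimeError("PortBindingFailed")   # stand-in for the real exception

with ThreadPoolExecutor() as pool:
    nwinfo = pool.submit(allocate_for_instance)  # "building networks asynchronously"
    # ... flavor/image checks, folder creation, disk copy all proceed meanwhile ...
    try:
        nwinfo.result()   # analogue of network_info's _sync_wrapper -> wait()
    except RuntimeError as exc:
        print("spawn fails here:", exc)
```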
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Traceback (most recent call last):
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] yield resources
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.driver.spawn(context, instance, image_meta,
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 691.200524] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] vm_ref = self.build_virtual_machine(instance,
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] vif_infos = vmwarevif.get_vif_info(self._session,
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] for vif in network_info:
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self._sync_wrapper(fn, *args, **kwargs)
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.wait()
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self[:] = self._gt.wait()
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self._exit_event.wait()
[ 691.200942] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] result = hub.switch()
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self.greenlet.switch()
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] result = function(*args, **kwargs)
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return func(*args, **kwargs)
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise e
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] nwinfo = self.network_api.allocate_for_instance(
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 691.201439] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] created_port_ids = self._update_ports_for_instance(
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] with excutils.save_and_reraise_exception():
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.force_reraise()
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise self.value
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] updated_port = self._update_port(
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] _ensure_no_port_binding_failure(port)
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 691.201905] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise exception.PortBindingFailed(port_id=port['id'])
[ 691.202259] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information.
[ 691.202259] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572]
[ 691.202259] env[61905]: INFO nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Terminating instance
[ 691.203629] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquiring lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 691.528714] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52451c53-e989-4d6d-ba64-39838ad07644 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "e6a063b4-d4f8-46ae-89ae-2d66637896ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.850s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 691.549224] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362347, 'name': ReconfigVM_Task, 'duration_secs': 0.775879} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 691.552577] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 84428003-72b1-467a-baf5-06ac37205622/84428003-72b1-467a-baf5-06ac37205622.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 691.553454] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-caca395e-e6d3-4dc3-9121-b13c9d050240 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 691.560718] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){
[ 691.560718] env[61905]: value = "task-1362348"
[ 691.560718] env[61905]: _type = "Task"
[ 691.560718] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.571847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.571847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.576087] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362348, 'name': Rename_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.588262] env[61905]: DEBUG nova.network.neutron [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.605091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.605625] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.640807] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "111d10e8-7e36-48b6-be45-2275c36fbee4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.641046] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.703773] env[61905]: DEBUG nova.network.neutron [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.953468] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4372034-b628-4997-b232-c988f6bf9110 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.961435] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bdd314-2b49-4ec9-bdea-b4ce556e3476 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.991401] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23e6e6c-7b9e-42fc-8c0a-da2fe3176afb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.998942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e135c7ca-5ecf-42a1-896f-447332a53f5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.011662] env[61905]: DEBUG nova.compute.provider_tree [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.070228] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362348, 'name': Rename_Task, 'duration_secs': 0.136898} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.070515] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.070758] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc1eaf77-ea31-4eee-9707-10aee48aa948 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.076699] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){ [ 692.076699] env[61905]: value = "task-1362349" [ 692.076699] env[61905]: _type = "Task" [ 692.076699] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.083633] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362349, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.206061] env[61905]: DEBUG oslo_concurrency.lockutils [req-2bba017e-a43b-4102-92df-af683c35f4c6 req-908e0c26-92ba-45ea-aff8-6abc1d21b291 service nova] Releasing lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.207619] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquired lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.207619] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.405507] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "b9199119-9d4e-4b04-8675-22f6680da8b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.405926] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.406019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "b9199119-9d4e-4b04-8675-22f6680da8b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.406787] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.406858] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 
tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.411019] env[61905]: INFO nova.compute.manager [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Terminating instance [ 692.411725] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.411869] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquired lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.412052] env[61905]: DEBUG nova.network.neutron [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.515310] env[61905]: DEBUG nova.scheduler.client.report [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.587761] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362349, 'name': PowerOnVM_Task} progress is 95%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.735057] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.814089] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.930500] env[61905]: DEBUG nova.network.neutron [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.987787] env[61905]: DEBUG nova.network.neutron [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.022079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.029s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.022815] env[61905]: ERROR nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information. 
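The ERROR record above (and the traceback that follows it) bottom out in Nova's port-binding sanity check, visible in the frames as nova/network/neutron.py `_ensure_no_port_binding_failure`. A minimal sketch of that check, with the exception class and the 'binding_failed' sentinel reconstructed locally for illustration — the real module uses nova.exception.PortBindingFailed and nova.network.model.VIF_TYPE_BINDING_FAILED:

```python
# Sketch of the check behind "Binding failed for port ...". Neutron reports
# binding:vif_type='binding_failed' when no mechanism driver could bind the
# port on the target host, which is why the message points at neutron logs.

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # Neutron's sentinel vif_type

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            f"logs for more information.")

def _ensure_no_port_binding_failure(port):
    # After creating/updating a port, Nova inspects the binding:vif_type
    # Neutron returned; the sentinel means the port is unusable on this
    # host, so the boot cannot proceed and the build is re-scheduled.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```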
[ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] Traceback (most recent call last): [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] self.driver.spawn(context, instance, image_meta, [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] vm_ref = self.build_virtual_machine(instance, [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] vif_infos = vmwarevif.get_vif_info(self._session, [ 693.022815] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] for vif in network_info: [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] return self._sync_wrapper(fn, *args, **kwargs) [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] self.wait() [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] self[:] = self._gt.wait() [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] return self._exit_event.wait() [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] result = hub.switch() [ 693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
693.023124] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] return self.greenlet.switch() [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] result = function(*args, **kwargs) [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] return func(*args, **kwargs) [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] raise e [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] nwinfo = self.network_api.allocate_for_instance( [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] created_port_ids = self._update_ports_for_instance( [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] with excutils.save_and_reraise_exception(): [ 693.023469] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] self.force_reraise() [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] raise self.value [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] updated_port = self._update_port( [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] _ensure_no_port_binding_failure(port) [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] raise exception.PortBindingFailed(port_id=port['id']) [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] nova.exception.PortBindingFailed: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information. [ 693.023856] env[61905]: ERROR nova.compute.manager [instance: ded96da7-74a4-4364-8424-22000411f5fe] [ 693.024158] env[61905]: DEBUG nova.compute.utils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 693.024764] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.159s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.026212] env[61905]: INFO nova.compute.claims [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.029121] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Build of instance ded96da7-74a4-4364-8424-22000411f5fe was re-scheduled: Binding failed for port 96ccd575-825e-42dc-8ec0-df5e3468eaca, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 693.029549] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 693.029792] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquiring lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.029953] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Acquired lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.030124] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.072557] env[61905]: DEBUG nova.compute.manager [req-17c91448-7f1c-4520-8e9d-b42f04b14ae1 req-bbd693fb-e2fd-4b62-90ee-98134c7d2d24 service nova] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Received event network-vif-deleted-98c51200-19dc-47c3-a19a-f3e236dc3f45 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 693.086702] env[61905]: DEBUG oslo_vmware.api [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362349, 'name': PowerOnVM_Task, 'duration_secs': 0.541306} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.086926] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.087137] env[61905]: INFO nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Took 5.88 seconds to spawn the instance on the hypervisor. 
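The `Task: {'id': task-1362348, ...} progress is N%` records above are emitted by oslo.vmware's task polling (oslo_vmware/api.py, wait_for_task/_poll_task). A minimal sketch of that loop, assuming a `get_task_info` helper on the session object; the real library raises its own fault exceptions rather than RuntimeError:

```python
# Poll a vCenter task object until it reaches a terminal state, logging
# progress along the way — the shape of the Rename_Task / PowerOnVM_Task
# lines in this log. 'session' and 'get_task_info' are assumed stand-ins.

import time

def wait_for_task(session, task_ref, interval=0.5):
    while True:
        info = session.get_task_info(task_ref)  # assumed helper
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # oslo.vmware translates the fault into a typed exception here.
            raise RuntimeError(info.error)
        # state is 'queued' or 'running': report progress and poll again.
        print(f"Task {info.key} ({info.name}) progress is {info.progress}%.")
        time.sleep(interval)
```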
[ 693.087311] env[61905]: DEBUG nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 693.088119] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46dd84b3-dbc6-48f4-8543-b183a33a74c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.317622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Releasing lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.318064] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.318148] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.318452] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6b43452-746a-4cab-9cb8-c2fa9225befe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.327404] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc177cfc-f3e3-42ae-b9f1-a7c28a58c649 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.350299] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ae6338f-289f-415a-b261-3be2f9948572 could not be found. [ 693.350647] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.350724] env[61905]: INFO nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Took 0.03 seconds to destroy the instance on the hypervisor. 
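Instance 7ae6338f-... above is "destroyed" in 0.03 seconds because its VM never materialized: the build failed at port binding, so the backend lookup raises InstanceNotFound and vmops downgrades it to the WARNING seen here instead of failing the terminate. A self-contained sketch of that tolerant destroy path; `_vm_ref_from_uuid` and the teardown step are illustrative stand-ins, not the real vm_util calls:

```python
# Destroy that tolerates a VM which was never created on the backend.

class InstanceNotFound(Exception):
    pass

def _vm_ref_from_uuid(session, uuid):
    # Stand-in for the SearchIndex.FindAllByUuid lookup invoked above;
    # here it always fails, as it does for a build that died pre-spawn.
    raise InstanceNotFound(f"Instance {uuid} could not be found.")

def destroy(session, uuid):
    try:
        vm_ref = _vm_ref_from_uuid(session, uuid)
        # ... power off, unregister VM, delete datastore files ...
    except InstanceNotFound as exc:
        # Matches the WARNING record: nothing on the backend to tear down.
        print(f"WARNING: Instance does not exist on backend: {exc}")
    print("Instance destroyed")

destroy(None, "7ae6338f-289f-415a-b261-3be2f9948572")
```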
[ 693.350981] env[61905]: DEBUG oslo.service.loopingcall [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.351197] env[61905]: DEBUG nova.compute.manager [-] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.351278] env[61905]: DEBUG nova.network.neutron [-] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.365578] env[61905]: DEBUG nova.network.neutron [-] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.491213] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Releasing lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.491747] env[61905]: DEBUG nova.compute.manager [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 693.491975] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.492896] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f9578b-a507-4b23-921e-33e062e06310 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.501293] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 693.501540] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3d78761-d663-4d36-93e7-b25abb1ccbdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.507766] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 693.507766] env[61905]: value = "task-1362350" [ 693.507766] env[61905]: _type = "Task" [ 693.507766] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.515769] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.556033] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.605239] env[61905]: INFO nova.compute.manager [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Took 26.85 seconds to build instance. [ 693.624774] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.868014] env[61905]: DEBUG nova.network.neutron [-] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.018569] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362350, 'name': PowerOffVM_Task, 'duration_secs': 0.106989} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.018866] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 694.019025] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 694.020024] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa83def9-b5e6-4176-a9d1-7a967360bb22 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.041928] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 694.042147] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 694.042338] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleting the datastore file [datastore2] b9199119-9d4e-4b04-8675-22f6680da8b1 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 694.042631] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cead0953-2985-4fd7-9f4f-865fccccc96e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.048497] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for the task: (returnval){ [ 694.048497] env[61905]: value = "task-1362352" [ 694.048497] env[61905]: _type = "Task" [ 694.048497] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.056090] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362352, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.108789] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ca9239bd-5537-45a8-b2e7-adeccd92c4b9 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 120.389s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.127863] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Releasing lock "refresh_cache-ded96da7-74a4-4364-8424-22000411f5fe" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.128162] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 694.128390] env[61905]: DEBUG nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.128508] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.148011] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.352451] env[61905]: DEBUG nova.compute.manager [None req-633abafa-a7c4-4d2e-a43b-dcbf2178d538 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 694.353383] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f269fc-1b51-459e-843f-1a4d05ebc93d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.371786] env[61905]: INFO nova.compute.manager [-] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Took 1.02 seconds to deallocate network for instance. 
[ 694.373897] env[61905]: DEBUG nova.compute.claims [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.374180] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.452415] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "84428003-72b1-467a-baf5-06ac37205622" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.452741] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.452982] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "84428003-72b1-467a-baf5-06ac37205622-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.453223] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.453432] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.456911] env[61905]: INFO nova.compute.manager [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Terminating instance [ 694.459210] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock 
"refresh_cache-84428003-72b1-467a-baf5-06ac37205622" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.460186] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquired lock "refresh_cache-84428003-72b1-467a-baf5-06ac37205622" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.460186] env[61905]: DEBUG nova.network.neutron [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.473670] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db97f8c-789c-4579-919a-e7825b024c3f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.483754] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784d7cab-5197-471f-a816-af9b3b5b1990 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.517340] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ace5fef-76ff-4182-a6b9-56a4937ac2b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.524595] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afcd154-89a3-4be5-b80a-714eb9294f76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.539435] env[61905]: DEBUG nova.compute.provider_tree [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.557712] env[61905]: DEBUG oslo_vmware.api [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Task: {'id': task-1362352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083441} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.558524] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.558524] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 694.558524] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.558696] env[61905]: INFO nova.compute.manager [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Took 1.07 seconds to destroy the instance on the hypervisor. [ 694.558852] env[61905]: DEBUG oslo.service.loopingcall [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 694.559044] env[61905]: DEBUG nova.compute.manager [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 694.559142] env[61905]: DEBUG nova.network.neutron [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.576243] env[61905]: DEBUG nova.network.neutron [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.611487] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 694.652742] env[61905]: DEBUG nova.network.neutron [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.866957] env[61905]: INFO nova.compute.manager [None req-633abafa-a7c4-4d2e-a43b-dcbf2178d538 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] instance snapshotting [ 694.867628] env[61905]: DEBUG nova.objects.instance [None req-633abafa-a7c4-4d2e-a43b-dcbf2178d538 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lazy-loading 'flavor' on Instance uuid 84428003-72b1-467a-baf5-06ac37205622 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 694.986853] env[61905]: DEBUG nova.network.neutron [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.035010] env[61905]: DEBUG nova.network.neutron [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.042116] env[61905]: DEBUG nova.scheduler.client.report [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.078806] env[61905]: DEBUG nova.network.neutron [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.129740] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.154940] env[61905]: INFO nova.compute.manager [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] [instance: ded96da7-74a4-4364-8424-22000411f5fe] Took 1.03 seconds to deallocate 
network for instance. [ 695.374747] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2070d63c-40e9-4d74-9e8f-a18d377bba86 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.395152] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cb66db-b738-43d7-935c-070078ca958d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.539144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Releasing lock "refresh_cache-84428003-72b1-467a-baf5-06ac37205622" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.539557] env[61905]: DEBUG nova.compute.manager [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 695.539747] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.540934] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c80ea7-64e4-4ffa-ac21-1ae2f84a4452 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.546449] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.546961] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.552371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.595s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.554106] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.554566] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45e5c6b2-59ff-4595-a111-51ff43dde0f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.562039] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){ [ 695.562039] env[61905]: value = "task-1362353" [ 695.562039] env[61905]: _type = "Task" [ 695.562039] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.570506] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.582131] env[61905]: INFO nova.compute.manager [-] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Took 1.02 seconds to deallocate network for instance. [ 695.905239] env[61905]: DEBUG nova.compute.manager [None req-633abafa-a7c4-4d2e-a43b-dcbf2178d538 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance disappeared during snapshot {{(pid=61905) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 696.040393] env[61905]: DEBUG nova.compute.manager [None req-633abafa-a7c4-4d2e-a43b-dcbf2178d538 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Found 0 images (rotation: 2) {{(pid=61905) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 696.056169] env[61905]: DEBUG nova.compute.utils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.061025] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.061133] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.076027] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.089550] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.152726] env[61905]: DEBUG nova.policy [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '947946764fc64847946057d867de54bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '980cc259c0254e84989e0cfc0e45837f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 696.194208] env[61905]: INFO nova.scheduler.client.report [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Deleted allocations for instance ded96da7-74a4-4364-8424-22000411f5fe [ 696.506207] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Successfully created port: 1700f4ea-6978-4247-ade7-87777bf566df {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.510119] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cce00a1-7ef7-4221-8145-4c917a69291e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.517266] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85bbae3-c092-465c-91f6-f209d85908ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.549320] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bde6f6-8033-4f3b-8712-064276b1a73c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.557063] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bec06de2-92ad-4484-9a0c-4030f7ad9769 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.561743] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 696.577767] env[61905]: DEBUG nova.compute.provider_tree [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.585418] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362353, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.704864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9baf920d-f1fc-41d9-876a-aeae7910c16e tempest-ServerDiagnosticsTest-513152053 tempest-ServerDiagnosticsTest-513152053-project-member] Lock "ded96da7-74a4-4364-8424-22000411f5fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.850s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.086644] env[61905]: DEBUG nova.scheduler.client.report [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.090018] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362353, 'name': PowerOffVM_Task, 'duration_secs': 1.056133} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.094710] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.094906] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.095194] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99edeab8-4184-49c5-b86d-35b79b84fdfc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.120635] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 697.120875] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 697.121084] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Deleting the datastore file [datastore2] 84428003-72b1-467a-baf5-06ac37205622 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.121352] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7aa79af0-bb71-41cc-9962-0c059c6a03e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.131468] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for the task: (returnval){ [ 697.131468] env[61905]: value = "task-1362355" [ 697.131468] env[61905]: _type = "Task" [ 697.131468] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.141438] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362355, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.208768] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 697.497802] env[61905]: DEBUG nova.compute.manager [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Received event network-changed-1700f4ea-6978-4247-ade7-87777bf566df {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 697.498025] env[61905]: DEBUG nova.compute.manager [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Refreshing instance network info cache due to event network-changed-1700f4ea-6978-4247-ade7-87777bf566df. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 697.498237] env[61905]: DEBUG oslo_concurrency.lockutils [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] Acquiring lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.498377] env[61905]: DEBUG oslo_concurrency.lockutils [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] Acquired lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.498997] env[61905]: DEBUG nova.network.neutron [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Refreshing network info cache for port 1700f4ea-6978-4247-ade7-87777bf566df {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 697.577857] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.598309] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.045s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.599069] env[61905]: ERROR nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. 
[ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Traceback (most recent call last): [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.driver.spawn(context, instance, image_meta, [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] vm_ref = self.build_virtual_machine(instance, [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.599069] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] for vif in network_info: [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self._sync_wrapper(fn, *args, **kwargs) [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.wait() [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self[:] = self._gt.wait() [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self._exit_event.wait() [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] result = hub.switch() [ 697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
697.599435] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return self.greenlet.switch() [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] result = function(*args, **kwargs) [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] return func(*args, **kwargs) [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise e [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] nwinfo = self.network_api.allocate_for_instance( [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] created_port_ids = self._update_ports_for_instance( [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] with excutils.save_and_reraise_exception(): [ 697.599777] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] self.force_reraise() [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise self.value [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] updated_port = self._update_port( [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] _ensure_no_port_binding_failure(port) [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] raise exception.PortBindingFailed(port_id=port['id']) [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] nova.exception.PortBindingFailed: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. [ 697.600748] env[61905]: ERROR nova.compute.manager [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] [ 697.601082] env[61905]: DEBUG nova.compute.utils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.601117] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.150s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.607298] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Build of instance 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434 was re-scheduled: Binding failed for port fe190d5e-c458-4a01-994f-4b475d299d78, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.608288] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.608288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquiring lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.608288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Acquired lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.609727] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.613750] env[61905]: ERROR nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. 
[ 697.613750] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.613750] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.613750] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.613750] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.613750] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.613750] env[61905]: ERROR nova.compute.manager raise self.value [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.613750] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 697.613750] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.613750] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 697.614699] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.614699] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 697.614699] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. 
[ 697.614699] env[61905]: ERROR nova.compute.manager [ 697.614699] env[61905]: Traceback (most recent call last): [ 697.614699] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 697.614699] env[61905]: listener.cb(fileno) [ 697.614699] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.614699] env[61905]: result = function(*args, **kwargs) [ 697.614699] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.614699] env[61905]: return func(*args, **kwargs) [ 697.614699] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.614699] env[61905]: raise e [ 697.614699] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.614699] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 697.614699] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.614699] env[61905]: created_port_ids = self._update_ports_for_instance( [ 697.614699] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.614699] env[61905]: with excutils.save_and_reraise_exception(): [ 697.614699] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.614699] env[61905]: self.force_reraise() [ 697.614699] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.614699] env[61905]: raise self.value [ 697.614699] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.614699] env[61905]: updated_port = self._update_port( [ 697.614699] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.614699] env[61905]: _ensure_no_port_binding_failure(port) [ 697.614699] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.614699] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 697.616140] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. 
[ 697.616140] env[61905]: Removing descriptor: 17 [ 697.618311] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.618595] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.618694] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.618853] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.619048] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.619288] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.619499] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.619656] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.619820] env[61905]: DEBUG nova.virt.hardware [None 
req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.619976] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.620159] env[61905]: DEBUG nova.virt.hardware [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.621071] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e47e1dc-9929-498c-aed7-af17e390851d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.629973] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6679891-9028-4a1b-a1ab-c6122d3c866e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.651092] env[61905]: ERROR nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. 
[ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Traceback (most recent call last): [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] yield resources [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.driver.spawn(context, instance, image_meta, [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] vm_ref = self.build_virtual_machine(instance, [ 697.651092] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] for vif in network_info: [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return self._sync_wrapper(fn, *args, **kwargs) [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.wait() [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self[:] = self._gt.wait() [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return self._exit_event.wait() [ 697.651668] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.651668] env[61905]: ERROR 
nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] current.throw(*self._exc) [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] result = function(*args, **kwargs) [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return func(*args, **kwargs) [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise e [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] nwinfo = self.network_api.allocate_for_instance( [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] created_port_ids = self._update_ports_for_instance( [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] with excutils.save_and_reraise_exception(): [ 697.652299] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.force_reraise() [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise self.value [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] updated_port = self._update_port( [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] _ensure_no_port_binding_failure(port) [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise exception.PortBindingFailed(port_id=port['id']) [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. [ 697.652991] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] [ 697.652991] env[61905]: INFO nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Terminating instance [ 697.656340] env[61905]: DEBUG oslo_vmware.api [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Task: {'id': task-1362355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102666} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.656861] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.657088] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.657253] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.657419] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.657585] env[61905]: INFO nova.compute.manager [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] [instance: 84428003-72b1-467a-baf5-06ac37205622] Took 2.12 seconds to destroy the instance on the hypervisor. [ 697.657934] env[61905]: DEBUG oslo.service.loopingcall [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.658190] env[61905]: DEBUG nova.compute.manager [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 697.658278] env[61905]: DEBUG nova.network.neutron [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.675292] env[61905]: DEBUG nova.network.neutron [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.733728] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.032261] env[61905]: DEBUG nova.network.neutron [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.140257] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 698.178083] env[61905]: DEBUG nova.network.neutron [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 698.179277] env[61905]: DEBUG nova.network.neutron [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 698.240413] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 698.570228] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a57e5ee-8a8c-4105-8f2e-85db1eed9098 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 698.580436] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2025519-9dc3-4e73-83ac-b1a540c044c0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 698.617626] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b06ec4-55e4-48d5-b1a3-269b8744e035 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 698.625926] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c38299-3276-4783-abdf-708ecdc771fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 698.640974] env[61905]: DEBUG nova.compute.provider_tree [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 698.681797] env[61905]: DEBUG oslo_concurrency.lockutils [req-89e16719-e0cc-4ce9-900e-7f9979e3439c req-2d3366ea-65b4-49e7-9eca-78bb79aa469c service nova] Releasing lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 698.682272] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 698.682459] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 698.683842] env[61905]: INFO nova.compute.manager [-] [instance: 84428003-72b1-467a-baf5-06ac37205622] Took 1.03 seconds to deallocate network for instance.
[ 698.724158] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 698.724367] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 698.724511] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 698.724656] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 698.739866] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Releasing lock "refresh_cache-677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 698.740117] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 698.740285] env[61905]: DEBUG nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 698.740447] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 698.757096] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 699.144331] env[61905]: DEBUG nova.scheduler.client.report [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 699.192965] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 699.203756] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 699.228096] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 699.228310] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 699.228373] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}}
[ 699.248203] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 699.248352] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 699.248860] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 699.248860] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid b9199119-9d4e-4b04-8675-22f6680da8b1 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 699.262111] env[61905]: DEBUG nova.network.neutron [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 699.317131] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 699.547218] env[61905]: DEBUG nova.compute.manager [req-ed29fb47-2ad4-4fd0-96fe-9191e541676c req-30653e4b-9db3-42cd-a4ba-b1f5ea41e393 service nova] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Received event network-vif-deleted-1700f4ea-6978-4247-ade7-87777bf566df {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 699.649571] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.048s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
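The paired "Acquiring lock" / "Acquired lock" / "Releasing lock" records around each refresh_cache-<uuid> name above are oslo.concurrency's named-lock helpers, which Nova uses to serialize work on a single instance's network info cache. A minimal sketch of the pattern, assuming a plain in-process lock (the lock name is taken from the log; the function and body are illustrative, not Nova's code):

    from oslo_concurrency import lockutils

    def refresh_info_cache(instance_uuid):
        # Serialize cache rebuilds per instance, mirroring the
        # 'refresh_cache-<uuid>' lock names in the records above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild instance_info_cache here

The "waited"/"held" timings logged on acquire and release come from the same module's instrumentation.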
[ 699.650362] env[61905]: ERROR nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information.
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Traceback (most recent call last):
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     self.driver.spawn(context, instance, image_meta,
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     vm_ref = self.build_virtual_machine(instance,
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 699.650362] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     for vif in network_info:
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     return self._sync_wrapper(fn, *args, **kwargs)
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     self.wait()
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     self[:] = self._gt.wait()
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     return self._exit_event.wait()
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     result = hub.switch()
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 699.650771] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     return self.greenlet.switch()
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     result = function(*args, **kwargs)
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     return func(*args, **kwargs)
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     raise e
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     nwinfo = self.network_api.allocate_for_instance(
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     created_port_ids = self._update_ports_for_instance(
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     with excutils.save_and_reraise_exception():
[ 699.651160] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     self.force_reraise()
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     raise self.value
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     updated_port = self._update_port(
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     _ensure_no_port_binding_failure(port)
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]     raise exception.PortBindingFailed(port_id=port['id'])
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] nova.exception.PortBindingFailed: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information.
[ 699.651545] env[61905]: ERROR nova.compute.manager [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86]
[ 699.651871] env[61905]: DEBUG nova.compute.utils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 699.652317] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.604s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 699.655279] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Build of instance 2a778ae5-37be-4479-b7ff-4468d0433c86 was re-scheduled: Binding failed for port d7b61672-7a84-4801-ab2e-099decf0c67f, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
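The traceback bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294): Neutron accepted the port update but reported that it could not bind the port, and Nova converts that into PortBindingFailed. A self-contained sketch of what that check amounts to, with the exception reduced to a plain Python class (the 'binding_failed' vif_type value is the Neutron convention; everything else here is illustrative, not Nova's actual code):

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron '
                'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port it failed to bind with
        # binding:vif_type = 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

This is why the message points at the Neutron logs: the root cause (no valid host, dead agent, wrong physnet) lives on the Neutron side.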
[ 699.655712] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 699.655933] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 699.656088] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 699.656248] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 699.765960] env[61905]: INFO nova.compute.manager [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] [instance: 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434] Took 1.03 seconds to deallocate network for instance.
[ 699.820941] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 699.821411] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 699.821599] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 699.821893] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c9aa70a-5723-4be4-805a-223da1b9d76b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 699.831552] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018a0082-17a4-4fd4-90d7-f83ab5dd25bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 699.860626] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7e393163-cd68-4de2-8051-7ec10415e508 could not be found.
[ 699.860868] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 699.861073] env[61905]: INFO nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 699.861320] env[61905]: DEBUG oslo.service.loopingcall [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 699.861538] env[61905]: DEBUG nova.compute.manager [-] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 699.861629] env[61905]: DEBUG nova.network.neutron [-] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 699.880976] env[61905]: DEBUG nova.network.neutron [-] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 700.254688] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 700.286865] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 700.383812] env[61905]: DEBUG nova.network.neutron [-] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 700.408415] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 700.569839] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bcc0b8-390f-4bfb-b4c8-53ae36426cb3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.577462] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c454345-4329-418c-b5f8-6531cca84f7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.607060] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7098d7b7-3317-47ea-a10b-f49aba98c4eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.614477] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e0833e-27d5-44fd-b6fa-ffc7b1b417bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 700.627959] env[61905]: DEBUG nova.compute.provider_tree [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 700.799058] env[61905]: INFO nova.scheduler.client.report [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Deleted allocations for instance 677f1b2d-8e7e-43ed-8a99-57fe4d9e4434
[ 700.885056] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 700.886429] env[61905]: INFO nova.compute.manager [-] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Took 1.02 seconds to deallocate network for instance.
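The oslo.service.loopingcall record above ("Waiting for function ... _deallocate_network_with_retries to return") is the generic looping-call wait: the deallocation attempt runs inside a looping call and the caller blocks on wait() until the loop signals completion. A minimal, runnable illustration of that contract, assuming a fixed-interval loop (Nova's actual retry loop is more elaborate; the function body here is a stand-in):

    from oslo_service import loopingcall

    def _deallocate_with_retries():
        # A real implementation would retry on failure; raising
        # LoopingCallDone stops the loop and unblocks wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2)
    result = timer.wait()  # True once the loop signals completion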
[ 700.888601] env[61905]: DEBUG nova.compute.claims [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 700.888745] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 700.910551] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-2a778ae5-37be-4479-b7ff-4468d0433c86" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 700.910772] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 700.910933] env[61905]: DEBUG nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 700.911113] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 700.931344] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 701.131241] env[61905]: DEBUG nova.scheduler.client.report [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 701.308430] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cedc1d76-5f0d-4111-87fb-fa31d6065c2a tempest-MigrationsAdminTest-1651072923 tempest-MigrationsAdminTest-1651072923-project-member] Lock "677f1b2d-8e7e-43ed-8a99-57fe4d9e4434" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.964s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 701.388162] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-b9199119-9d4e-4b04-8675-22f6680da8b1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 701.388367] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}}
[ 701.388560] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.388737] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.388891] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.389043] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.389182] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.389317] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.389438] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 701.389571] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 701.435624] env[61905]: DEBUG nova.network.neutron [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 701.635816] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.983s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
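The inventory dict repeated in these report records fixes the capacity Placement will allow against provider 9cb855ec-212a-457a-a4ff-55e9d97323b7: usable capacity per resource class is (total - reserved) * allocation_ratio. Worked through with the logged numbers (resource-class keys and values copied from the records above):

    # Capacity implied by the inventory data in the records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So the 48 host VCPUs are oversubscribed 4x into 192 allocatable units, while memory and disk are not oversubscribed.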
[ 701.636483] env[61905]: ERROR nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information.
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Traceback (most recent call last):
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     self.driver.spawn(context, instance, image_meta,
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     vm_ref = self.build_virtual_machine(instance,
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 701.636483] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     for vif in network_info:
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     return self._sync_wrapper(fn, *args, **kwargs)
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     self.wait()
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     self[:] = self._gt.wait()
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     return self._exit_event.wait()
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     current.throw(*self._exc)
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 701.636881] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     result = function(*args, **kwargs)
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     return func(*args, **kwargs)
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     raise e
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     nwinfo = self.network_api.allocate_for_instance(
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     created_port_ids = self._update_ports_for_instance(
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     with excutils.save_and_reraise_exception():
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     self.force_reraise()
[ 701.637336] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     raise self.value
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     updated_port = self._update_port(
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     _ensure_no_port_binding_failure(port)
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]     raise exception.PortBindingFailed(port_id=port['id'])
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] nova.exception.PortBindingFailed: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information.
[ 701.637673] env[61905]: ERROR nova.compute.manager [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753]
[ 701.637673] env[61905]: DEBUG nova.compute.utils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 701.638906] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.022s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 701.640404] env[61905]: INFO nova.compute.claims [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 701.643176] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Build of instance 03f9b48c-4bd1-4018-b34f-267e1575c753 was re-scheduled: Binding failed for port 26e8448d-f7c7-404a-901b-ac6478cb284d, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}}
[ 701.643672] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}}
[ 701.643954] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 701.644169] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 701.644385] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 701.810834] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
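Note where both tracebacks surface the failure: in get_vif_info's "for vif in network_info:" loop, not in the Neutron call itself. Network allocation runs in a separate green thread (_allocate_network_async), and iterating the network_info wrapper calls GreenThread.wait(), which re-raises whatever exception the green thread stored. A tiny standalone illustration of that eventlet behavior (names and the RuntimeError are illustrative):

    import eventlet

    def allocate():
        # Stand-in for the Neutron allocation that raised PortBindingFailed.
        raise RuntimeError('binding failed')

    gt = eventlet.spawn(allocate)
    try:
        gt.wait()  # the green thread's exception is re-raised here...
    except RuntimeError:
        print('...far from where allocate() actually ran')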
[ 701.893110] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 701.943478] env[61905]: INFO nova.compute.manager [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 2a778ae5-37be-4479-b7ff-4468d0433c86] Took 1.03 seconds to deallocate network for instance.
[ 702.180948] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 702.328587] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 702.343963] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 702.831889] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-03f9b48c-4bd1-4018-b34f-267e1575c753" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 702.834492] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}}
[ 702.834492] env[61905]: DEBUG nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 702.834492] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 702.861600] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 702.976834] env[61905]: INFO nova.scheduler.client.report [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance 2a778ae5-37be-4479-b7ff-4468d0433c86
[ 703.137851] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d5562d-4f66-4914-bb17-9f52b9b4e9af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 703.145560] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da170c0-c972-4b84-913c-ba7f512714a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 703.178542] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b1930a-c293-4779-b087-d7677256ccbd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 703.192671] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d408a9c-4081-486b-9229-775e46698df0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 703.206243] env[61905]: DEBUG nova.compute.provider_tree [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 703.363997] env[61905]: DEBUG nova.network.neutron [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 703.495102] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d82c84-d01a-4ea8-8652-94d4066a275b tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "2a778ae5-37be-4479-b7ff-4468d0433c86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.855s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 703.710175] env[61905]: DEBUG nova.scheduler.client.report [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 703.868999] env[61905]: INFO nova.compute.manager [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 03f9b48c-4bd1-4018-b34f-267e1575c753] Took 1.04 seconds to deallocate network for instance.
[ 704.000079] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 704.216662] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 704.217027] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 704.223135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.793s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 704.224269] env[61905]: INFO nova.compute.claims [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 704.387265] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "12c21d8e-1941-4481-9216-015ba6c09b9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 704.387749] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 704.525607] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 704.733492] env[61905]: DEBUG nova.compute.utils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
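A few records below, nova.policy logs a failed check for network:attach_external_network: the tempest user only holds the 'reader' and 'member' roles, so a rule requiring admin denies attaching to an external network, and the build proceeds on a normal tenant network. That check is ordinary oslo.policy enforcement; a minimal sketch under the assumption of an admin-only default rule (the rule name and roles come from the log, the default check string and credentials dict are illustrative):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.ConfigOpts())
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'], 'is_admin': False}
    # False for a reader/member user, matching the failed check in the log.
    print(enforcer.enforce('network:attach_external_network', {}, creds))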
[ 704.735312] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 704.735545] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 704.803471] env[61905]: DEBUG nova.policy [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd84eed64481b48438f85cd60bb3756ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00166d3f12b24cafb91c4cd200ad8762', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 704.919753] env[61905]: INFO nova.scheduler.client.report [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted allocations for instance 03f9b48c-4bd1-4018-b34f-267e1575c753
[ 705.159031] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Successfully created port: df3f464d-8204-4dcf-aa3e-bdb12757450c {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 705.241944] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 705.433898] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c6cd540a-9b4c-4557-ac34-fb173c0c41d6 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "03f9b48c-4bd1-4018-b34f-267e1575c753" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.293s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 705.627279] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 705.627510] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 705.735061] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c828fe6-6ab3-41a4-8849-81bd1ebbe8fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 705.742688] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9830861-d44e-4e35-bde6-5ef1bdc00359 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 705.781609] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a770ee-985e-4d84-8b7c-43b873cff510 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 705.789920] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3647185d-1999-4b93-ae01-9f62a6f02168 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 705.805236] env[61905]: DEBUG nova.compute.provider_tree [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 705.939023] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 706.253849] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 706.279991] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 706.279991] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 706.280151] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 706.280275] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 706.280886] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 706.280886] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 706.280886] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 706.281531] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.281425] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.281531] env[61905]: DEBUG nova.virt.hardware [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.282402] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a36787c-a057-4f18-9764-0dabf3756785 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.290605] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfd7111-1da1-4995-b7d9-0b27a89d6d99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.309720] env[61905]: DEBUG nova.scheduler.client.report [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.467645] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.610737] env[61905]: DEBUG nova.compute.manager [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Received event network-changed-df3f464d-8204-4dcf-aa3e-bdb12757450c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 706.610878] env[61905]: DEBUG nova.compute.manager [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service 
nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Refreshing instance network info cache due to event network-changed-df3f464d-8204-4dcf-aa3e-bdb12757450c. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 706.611138] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] Acquiring lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.611377] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] Acquired lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.611553] env[61905]: DEBUG nova.network.neutron [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Refreshing network info cache for port df3f464d-8204-4dcf-aa3e-bdb12757450c {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.815988] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.816570] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 706.819420] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.438s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.820971] env[61905]: INFO nova.compute.claims [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.880023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.880023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.914057] env[61905]: ERROR nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. 
[ 706.914057] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.914057] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.914057] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.914057] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.914057] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.914057] env[61905]: ERROR nova.compute.manager raise self.value [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.914057] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 706.914057] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.914057] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 706.914538] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.914538] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 706.914538] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. 
[ 706.914538] env[61905]: ERROR nova.compute.manager [ 706.914538] env[61905]: Traceback (most recent call last): [ 706.914538] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 706.914538] env[61905]: listener.cb(fileno) [ 706.914538] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.914538] env[61905]: result = function(*args, **kwargs) [ 706.914538] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.914538] env[61905]: return func(*args, **kwargs) [ 706.914538] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.914538] env[61905]: raise e [ 706.914538] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.914538] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 706.914538] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.914538] env[61905]: created_port_ids = self._update_ports_for_instance( [ 706.914538] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.914538] env[61905]: with excutils.save_and_reraise_exception(): [ 706.914538] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.914538] env[61905]: self.force_reraise() [ 706.914538] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.914538] env[61905]: raise self.value [ 706.914538] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.914538] env[61905]: updated_port = self._update_port( [ 706.914538] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.914538] env[61905]: _ensure_no_port_binding_failure(port) [ 706.914538] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.914538] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 706.915359] env[61905]: nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. [ 706.915359] env[61905]: Removing descriptor: 18 [ 706.919022] env[61905]: ERROR nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. 
[ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Traceback (most recent call last): [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] yield resources [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.driver.spawn(context, instance, image_meta, [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] vm_ref = self.build_virtual_machine(instance, [ 706.919022] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] for vif in network_info: [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self._sync_wrapper(fn, *args, **kwargs) [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.wait() [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self[:] = self._gt.wait() [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self._exit_event.wait() [ 706.919411] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 706.919411] env[61905]: ERROR 
nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] result = hub.switch() [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self.greenlet.switch() [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] result = function(*args, **kwargs) [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return func(*args, **kwargs) [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise e [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] nwinfo = self.network_api.allocate_for_instance( [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] created_port_ids = self._update_ports_for_instance( [ 706.919754] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] with excutils.save_and_reraise_exception(): [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.force_reraise() [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise self.value [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] updated_port = self._update_port( [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.920105] 
env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] _ensure_no_port_binding_failure(port) [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise exception.PortBindingFailed(port_id=port['id']) [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. [ 706.920105] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] [ 706.920503] env[61905]: INFO nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Terminating instance [ 706.921947] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquiring lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.136222] env[61905]: DEBUG nova.network.neutron [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.273550] env[61905]: DEBUG nova.network.neutron [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.327377] env[61905]: DEBUG nova.compute.utils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.336066] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.336066] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.395538] env[61905]: DEBUG nova.policy [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8a39c9ecc5146eab73e5bd83e5258e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f453aa297d54442a653eab4a59a3dc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 707.779680] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf23b1da-a2a2-4807-8c86-0c1d4da9c19c req-3692a485-af90-43c6-844e-0419e1bfee69 service nova] Releasing lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.779680] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquired lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.779680] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.838804] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 708.100021] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Successfully created port: 281dd8bc-eac0-44e0-8975-9c6f1182642c {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.308288] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.352372] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964286a0-13b1-420a-a035-f14aaaf3e2ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.362746] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6ae52f-e381-485e-b6d8-bcb661a4c919 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.400580] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b3b564-3f41-4a8b-9913-9cb258fd1c7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.409274] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31958cae-e098-441e-ac4f-0b5d68c2ea9d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.423613] env[61905]: DEBUG nova.compute.provider_tree [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.528354] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.653162] env[61905]: DEBUG nova.compute.manager [req-7d83b302-3975-4760-926c-9465919cdc60 req-60fc041e-49fb-48e0-b0c4-c1ad3c582a7d service nova] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Received event network-vif-deleted-df3f464d-8204-4dcf-aa3e-bdb12757450c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 708.851867] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 708.887540] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.889534] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.889534] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.889534] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.889534] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.889534] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.889763] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.889763] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 708.889920] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.890066] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.890275] env[61905]: DEBUG nova.virt.hardware [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.891971] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4398a9-9654-4dfc-b50d-e231db77f81e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.900819] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acbe319-1688-4e1b-8c59-5970ffeb93d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.927143] env[61905]: DEBUG nova.scheduler.client.report [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 709.033620] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Releasing lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.034096] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 709.034289] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 709.034680] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d10a29ab-0115-40e9-a3cf-55d2f4c3c9b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.045680] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285b3273-3381-4096-a30e-6eb7dc7db045 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.070324] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c7e66b30-f72d-4afd-aded-4a92dd19b388 could not be found. [ 709.070553] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 709.070987] env[61905]: INFO nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Took 0.04 seconds to destroy the instance on the hypervisor. [ 709.070987] env[61905]: DEBUG oslo.service.loopingcall [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 709.071204] env[61905]: DEBUG nova.compute.manager [-] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 709.071301] env[61905]: DEBUG nova.network.neutron [-] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 709.094636] env[61905]: DEBUG nova.network.neutron [-] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.433030] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.433030] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 709.442040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.061s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.502696] env[61905]: ERROR nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 709.502696] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.502696] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.502696] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.502696] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.502696] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.502696] env[61905]: ERROR nova.compute.manager raise self.value [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.502696] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 709.502696] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.502696] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 709.503141] env[61905]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.503141] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 709.503141] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 709.503141] env[61905]: ERROR nova.compute.manager [ 709.503141] env[61905]: Traceback (most recent call last): [ 709.503141] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 709.503141] env[61905]: listener.cb(fileno) [ 709.503141] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.503141] env[61905]: result = function(*args, **kwargs) [ 709.503141] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.503141] env[61905]: return func(*args, **kwargs) [ 709.503141] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.503141] env[61905]: raise e [ 709.503141] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.503141] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 709.503141] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.503141] env[61905]: created_port_ids = self._update_ports_for_instance( [ 709.503141] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.503141] env[61905]: with excutils.save_and_reraise_exception(): [ 709.503141] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.503141] env[61905]: self.force_reraise() [ 709.503141] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.503141] env[61905]: raise self.value [ 709.503141] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.503141] env[61905]: updated_port = self._update_port( [ 709.503141] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.503141] env[61905]: _ensure_no_port_binding_failure(port) [ 709.503141] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.503141] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 709.504157] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 709.504157] env[61905]: Removing descriptor: 18 [ 709.504157] env[61905]: ERROR nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. 
[ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Traceback (most recent call last): [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] yield resources [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.driver.spawn(context, instance, image_meta, [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 709.504157] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] vm_ref = self.build_virtual_machine(instance, [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] for vif in network_info: [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self._sync_wrapper(fn, *args, **kwargs) [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.wait() [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self[:] = self._gt.wait() [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self._exit_event.wait() [ 709.504587] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 709.504997] env[61905]: ERROR 
nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] result = hub.switch() [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self.greenlet.switch() [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] result = function(*args, **kwargs) [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return func(*args, **kwargs) [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise e [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] nwinfo = self.network_api.allocate_for_instance( [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 709.504997] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] created_port_ids = self._update_ports_for_instance( [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] with excutils.save_and_reraise_exception(): [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.force_reraise() [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise self.value [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] updated_port = self._update_port( [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 709.505458] 
env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] _ensure_no_port_binding_failure(port) [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 709.505458] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise exception.PortBindingFailed(port_id=port['id']) [ 709.505823] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 709.505823] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] [ 709.505823] env[61905]: INFO nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Terminating instance [ 709.507681] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.507681] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquired lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.507681] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.598990] env[61905]: DEBUG nova.network.neutron [-] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.944229] env[61905]: DEBUG nova.compute.utils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.945914] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.946318] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 710.018928] env[61905]: DEBUG nova.policy [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8a39c9ecc5146eab73e5bd83e5258e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f453aa297d54442a653eab4a59a3dc5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 710.036617] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.103846] env[61905]: INFO nova.compute.manager [-] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Took 1.03 seconds to deallocate network for instance. [ 710.106054] env[61905]: DEBUG nova.compute.claims [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 710.106392] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.175725] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.421030] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901ba80c-518f-465d-aedc-43345226bce5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.429869] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e87f6c7-6bd9-46d4-8904-24e148b82582 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.460247] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 
tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.464110] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Successfully created port: 5cb52e1a-9d18-439a-a392-5f279c39779d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.467899] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd59a67-0ee8-4c38-aa22-25ec9aa949a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.475881] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5846411-71de-4c0b-9730-bfbca6866ad6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.489206] env[61905]: DEBUG nova.compute.provider_tree [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.678609] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Releasing lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.679052] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 710.679325] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.679607] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3aa91ab-0192-4a9f-b804-a1e5aa062092 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.694110] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6e161c-7633-4c2e-b30d-778531ab7224 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.704450] env[61905]: DEBUG nova.compute.manager [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Received event network-changed-281dd8bc-eac0-44e0-8975-9c6f1182642c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 710.704877] env[61905]: DEBUG nova.compute.manager [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Refreshing instance network info cache due to event network-changed-281dd8bc-eac0-44e0-8975-9c6f1182642c. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 710.705225] env[61905]: DEBUG oslo_concurrency.lockutils [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] Acquiring lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.705491] env[61905]: DEBUG oslo_concurrency.lockutils [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] Acquired lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.705762] env[61905]: DEBUG nova.network.neutron [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Refreshing network info cache for port 281dd8bc-eac0-44e0-8975-9c6f1182642c {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.719112] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a1e2a21-a43d-4363-9f1f-683e35d199aa could not be found. 
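Editor's note: the PortBindingFailed traceback above funnels through two pieces of plumbing that recur throughout this log: a check on the port's binding result, and oslo.utils' save_and_reraise_exception context manager (the force_reraise / raise self.value frames). Below is a minimal self-contained sketch of that shape. The exception class and helper names are stand-ins modeled on the frames above, and the 'binding_failed' vif_type check is stated as an assumption about how Neutron reports a failed binding, not quoted from Nova source.

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron signals a failed binding via the
        # binding:vif_type attribute; 'binding_failed' means no
        # mechanism driver could bind the port on the target host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    def update_port(port):
        try:
            ensure_no_port_binding_failure(port)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup would run here; on __exit__ the saved
                # exception is re-raised, which is why force_reraise()
                # and "raise self.value" appear in the tracebacks.
                pass

    try:
        update_port({'id': '281dd8bc-eac0-44e0-8975-9c6f1182642c',
                     'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message shape as logged for the instance above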
[ 710.719485] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.719773] env[61905]: INFO nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 710.720803] env[61905]: DEBUG oslo.service.loopingcall [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.721177] env[61905]: DEBUG nova.compute.manager [-] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.721372] env[61905]: DEBUG nova.network.neutron [-] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 710.740080] env[61905]: DEBUG nova.network.neutron [-] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.992224] env[61905]: DEBUG nova.scheduler.client.report [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 711.240254] env[61905]: DEBUG nova.network.neutron [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.243385] env[61905]: DEBUG nova.network.neutron [-] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.393510] env[61905]: DEBUG nova.network.neutron [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.478059] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 711.497393] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.062s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.498935] env[61905]: ERROR nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information. 
[ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Traceback (most recent call last): [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.driver.spawn(context, instance, image_meta, [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] vm_ref = self.build_virtual_machine(instance, [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.498935] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] for vif in network_info: [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self._sync_wrapper(fn, *args, **kwargs) [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.wait() [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self[:] = self._gt.wait() [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self._exit_event.wait() [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] result = hub.switch() [ 711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
711.499365] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return self.greenlet.switch() [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] result = function(*args, **kwargs) [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] return func(*args, **kwargs) [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise e [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] nwinfo = self.network_api.allocate_for_instance( [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] created_port_ids = self._update_ports_for_instance( [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] with excutils.save_and_reraise_exception(): [ 711.499706] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] self.force_reraise() [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise self.value [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] updated_port = self._update_port( [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] _ensure_no_port_binding_failure(port) [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] raise exception.PortBindingFailed(port_id=port['id']) [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] nova.exception.PortBindingFailed: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information. [ 711.500142] env[61905]: ERROR nova.compute.manager [instance: 7ae6338f-289f-415a-b261-3be2f9948572] [ 711.500482] env[61905]: DEBUG nova.compute.utils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 711.502269] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.373s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.503753] env[61905]: INFO nova.compute.claims [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.507792] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Build of instance 7ae6338f-289f-415a-b261-3be2f9948572 was re-scheduled: Binding failed for port 98c51200-19dc-47c3-a19a-f3e236dc3f45, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 711.508108] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 711.508363] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquiring lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.508523] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Acquired lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.508977] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.517140] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.517264] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.517419] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.517596] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 
tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.517763] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.517983] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 711.518249] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 711.518415] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.518638] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.518825] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.519009] env[61905]: DEBUG nova.virt.hardware [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.519872] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5569edd6-3659-44e1-8b00-586045aabe68 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.529955] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe9fb72-a4e1-4245-9e5e-96dfdaf1e3db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.563336] env[61905]: ERROR nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for 
port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. [ 711.563336] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.563336] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.563336] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.563336] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.563336] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.563336] env[61905]: ERROR nova.compute.manager raise self.value [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.563336] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.563336] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.563336] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.563795] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.563795] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.563795] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. 
[ 711.563795] env[61905]: ERROR nova.compute.manager [ 711.563795] env[61905]: Traceback (most recent call last): [ 711.563795] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.563795] env[61905]: listener.cb(fileno) [ 711.563795] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.563795] env[61905]: result = function(*args, **kwargs) [ 711.563795] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.563795] env[61905]: return func(*args, **kwargs) [ 711.563795] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.563795] env[61905]: raise e [ 711.563795] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.563795] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 711.563795] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.563795] env[61905]: created_port_ids = self._update_ports_for_instance( [ 711.563795] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.563795] env[61905]: with excutils.save_and_reraise_exception(): [ 711.563795] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.563795] env[61905]: self.force_reraise() [ 711.563795] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.563795] env[61905]: raise self.value [ 711.563795] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.563795] env[61905]: updated_port = self._update_port( [ 711.563795] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.563795] env[61905]: _ensure_no_port_binding_failure(port) [ 711.563795] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.563795] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.564549] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. [ 711.564549] env[61905]: Removing descriptor: 18 [ 711.564549] env[61905]: ERROR nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. 
[ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Traceback (most recent call last): [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] yield resources [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.driver.spawn(context, instance, image_meta, [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.564549] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] vm_ref = self.build_virtual_machine(instance, [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] for vif in network_info: [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self._sync_wrapper(fn, *args, **kwargs) [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.wait() [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self[:] = self._gt.wait() [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self._exit_event.wait() [ 711.564886] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.565248] env[61905]: ERROR 
nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] result = hub.switch() [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self.greenlet.switch() [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] result = function(*args, **kwargs) [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return func(*args, **kwargs) [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise e [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] nwinfo = self.network_api.allocate_for_instance( [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 711.565248] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] created_port_ids = self._update_ports_for_instance( [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] with excutils.save_and_reraise_exception(): [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.force_reraise() [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise self.value [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] updated_port = self._update_port( [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.565582] 
env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] _ensure_no_port_binding_failure(port) [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.565582] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise exception.PortBindingFailed(port_id=port['id']) [ 711.565905] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. [ 711.565905] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] [ 711.565905] env[61905]: INFO nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Terminating instance [ 711.566630] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.566838] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquired lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.566982] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.746926] env[61905]: INFO nova.compute.manager [-] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Took 1.03 seconds to deallocate network for instance. 
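Editor's note: every Acquiring/Acquired/Releasing triplet in these records (the refresh_cache-<uuid> and "compute_resources" locks, with their waited/held timings) is emitted by oslo.concurrency's lockutils. A short sketch of the two idioms that produce them; the lock names are taken from the log, the function bodies are placeholders:

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the network info cache refresh:
    with lockutils.lock('refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3'):
        pass  # refresh the instance's network info cache while holding it

    # Decorator form, as used by the resource tracker for its
    # "compute_resources" critical sections (instance_claim / abort):
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        pass  # return the claimed resources to the tracker

    abort_instance_claim()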
[ 711.749237] env[61905]: DEBUG nova.compute.claims [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 711.749411] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.896909] env[61905]: DEBUG oslo_concurrency.lockutils [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] Releasing lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.897235] env[61905]: DEBUG nova.compute.manager [req-64ecac2e-90cf-4d75-8ef0-bf8d550c1dbb req-ec9e77a4-62ea-4085-bd2c-18ccaa724e42 service nova] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Received event network-vif-deleted-281dd8bc-eac0-44e0-8975-9c6f1182642c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 712.025845] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.090197] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.159800] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.198253] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.659607] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Releasing lock "refresh_cache-7ae6338f-289f-415a-b261-3be2f9948572" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.660024] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 712.660024] env[61905]: DEBUG nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.660137] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.674822] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.700693] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Releasing lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.702084] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 712.702201] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.704611] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a162b282-1bf6-45a8-a79d-13f7b04e2a24 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.714877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6328d585-c6c9-47c6-aaa2-c2d84179f351 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.726230] env[61905]: DEBUG nova.compute.manager [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Received event network-changed-5cb52e1a-9d18-439a-a392-5f279c39779d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 712.726419] env[61905]: DEBUG nova.compute.manager [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Refreshing instance network info cache due to event network-changed-5cb52e1a-9d18-439a-a392-5f279c39779d. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 712.726625] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] Acquiring lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.726762] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] Acquired lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.726942] env[61905]: DEBUG nova.network.neutron [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Refreshing network info cache for port 5cb52e1a-9d18-439a-a392-5f279c39779d {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.742359] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3 could not be found. 
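Editor's note: the reason a network failure surfaces as "Instance failed to spawn" only later is visible in the traceback frames: allocation runs in an eventlet greenthread (_allocate_network_async) and the error re-raises when the spawn path finally waits on the result through the network model's _sync_wrapper. A stripped-down reproduction of that deferral, with stand-in function names rather than Nova's actual call chain:

    import eventlet

    def allocate_network():
        # Stand-in for network_api.allocate_for_instance(); raising here
        # models the PortBindingFailed seen above.
        raise RuntimeError('Binding failed for port ...')

    # Allocation is kicked off in the background while other build steps
    # (block device mappings, hardware topology) proceed...
    gt = eventlet.spawn(allocate_network)

    # ...and only joined when VIF info is needed to build the VM. The
    # greenthread's stored exception re-raises here, mid-spawn:
    try:
        gt.wait()
    except RuntimeError as exc:
        print('surfaced during spawn:', exc)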
[ 712.742574] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.742752] env[61905]: INFO nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 712.742986] env[61905]: DEBUG oslo.service.loopingcall [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.745529] env[61905]: DEBUG nova.compute.manager [-] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 712.745641] env[61905]: DEBUG nova.network.neutron [-] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.770052] env[61905]: DEBUG nova.network.neutron [-] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.916222] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab68542-a39e-4446-ba08-0acf7507c93c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.923836] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb94b23-36ed-42b1-87d5-473d1650aab9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.956034] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa2c22b-7204-4eb8-a304-13f50c76c50a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.963157] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005f72ae-6a50-40f6-9735-276fbdc71406 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.976595] env[61905]: DEBUG nova.compute.provider_tree [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.177697] env[61905]: DEBUG nova.network.neutron [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Updating instance_info_cache
with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.247037] env[61905]: DEBUG nova.network.neutron [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.274237] env[61905]: DEBUG nova.network.neutron [-] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.327628] env[61905]: DEBUG nova.network.neutron [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.480691] env[61905]: DEBUG nova.scheduler.client.report [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 713.680141] env[61905]: INFO nova.compute.manager [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] [instance: 7ae6338f-289f-415a-b261-3be2f9948572] Took 1.02 seconds to deallocate network for instance. [ 713.777053] env[61905]: INFO nova.compute.manager [-] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Took 1.03 seconds to deallocate network for instance. 
[ 713.779284] env[61905]: DEBUG nova.compute.claims [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 713.779460] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.830575] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] Releasing lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.830814] env[61905]: DEBUG nova.compute.manager [req-eb66d72d-45e6-4928-9130-29ca50898983 req-8445431b-7c96-42c3-ab9d-7b00bea137ff service nova] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Received event network-vif-deleted-5cb52e1a-9d18-439a-a392-5f279c39779d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 713.985797] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.483s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.986363] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 713.989025] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.900s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.989258] env[61905]: DEBUG nova.objects.instance [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lazy-loading 'resources' on Instance uuid b9199119-9d4e-4b04-8675-22f6680da8b1 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 714.494393] env[61905]: DEBUG nova.compute.utils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.495917] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 714.496093] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 714.545463] env[61905]: DEBUG nova.policy [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a8fde5f58469453f8826ab870750bf18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ec5905f12ea4f2ca64b22ad8b3c6bb3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 714.708265] env[61905]: INFO nova.scheduler.client.report [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Deleted allocations for instance 7ae6338f-289f-415a-b261-3be2f9948572 [ 714.940735] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d1826b-7e41-4be1-b432-f868e43a4876 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.948223] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbb1ddd-d759-4001-ae3b-0b03696935f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.982005] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f9075e-d5ac-4725-89f3-8ee246da598e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.989364] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1119fd-6828-41e8-87a8-20bd4518c8a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.007387] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 715.012118] env[61905]: DEBUG nova.compute.provider_tree [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.103698] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Successfully created port: c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.224441] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd15c08c-77ee-41de-a307-ad0801dbc4c0 tempest-InstanceActionsV221TestJSON-1391701190 tempest-InstanceActionsV221TestJSON-1391701190-project-member] Lock "7ae6338f-289f-415a-b261-3be2f9948572" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 141.187s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.518571] env[61905]: DEBUG nova.scheduler.client.report [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.728640] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 715.840010] env[61905]: DEBUG nova.compute.manager [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Received event network-changed-c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 715.840216] env[61905]: DEBUG nova.compute.manager [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Refreshing instance network info cache due to event network-changed-c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 715.840429] env[61905]: DEBUG oslo_concurrency.lockutils [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] Acquiring lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.840569] env[61905]: DEBUG oslo_concurrency.lockutils [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] Acquired lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.840722] env[61905]: DEBUG nova.network.neutron [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Refreshing network info cache for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.025887] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 716.029774] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.034095] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.300s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.036219] env[61905]: INFO nova.compute.claims [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.067613] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 716.067911] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 716.068128] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.068361] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 716.068741] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 716.068970] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 716.069206] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 716.069405] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 716.069584] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 716.069745] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 716.072083] env[61905]: DEBUG nova.virt.hardware [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 716.072083] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2003863e-9a87-4a6b-acd4-0e9a3e40ecee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.076288] env[61905]: INFO nova.scheduler.client.report [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Deleted allocations for instance b9199119-9d4e-4b04-8675-22f6680da8b1 [ 716.084332] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37b9fc6-c7a7-4ac2-80d0-7b6588fb91e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.102711] env[61905]: ERROR nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. 
[ 716.102711] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 716.102711] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.102711] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.102711] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.102711] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.102711] env[61905]: ERROR nova.compute.manager raise self.value [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.102711] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 716.102711] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.102711] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 716.103473] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.103473] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 716.103473] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. 
[ 716.103473] env[61905]: ERROR nova.compute.manager [ 716.103473] env[61905]: Traceback (most recent call last): [ 716.103473] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 716.103473] env[61905]: listener.cb(fileno) [ 716.103473] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.103473] env[61905]: result = function(*args, **kwargs) [ 716.103473] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.103473] env[61905]: return func(*args, **kwargs) [ 716.103473] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 716.103473] env[61905]: raise e [ 716.103473] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 716.103473] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 716.103473] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.103473] env[61905]: created_port_ids = self._update_ports_for_instance( [ 716.103473] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.103473] env[61905]: with excutils.save_and_reraise_exception(): [ 716.103473] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.103473] env[61905]: self.force_reraise() [ 716.103473] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.103473] env[61905]: raise self.value [ 716.103473] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.103473] env[61905]: updated_port = self._update_port( [ 716.103473] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.103473] env[61905]: _ensure_no_port_binding_failure(port) [ 716.103473] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.103473] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 716.105195] env[61905]: nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. [ 716.105195] env[61905]: Removing descriptor: 17 [ 716.105195] env[61905]: ERROR nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. 
[ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Traceback (most recent call last): [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] yield resources [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.driver.spawn(context, instance, image_meta, [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 716.105195] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] vm_ref = self.build_virtual_machine(instance, [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] vif_infos = vmwarevif.get_vif_info(self._session, [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] for vif in network_info: [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self._sync_wrapper(fn, *args, **kwargs) [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.wait() [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self[:] = self._gt.wait() [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self._exit_event.wait() [ 716.106064] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 716.106420] env[61905]: ERROR 
nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] result = hub.switch() [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self.greenlet.switch() [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] result = function(*args, **kwargs) [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return func(*args, **kwargs) [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise e [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] nwinfo = self.network_api.allocate_for_instance( [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 716.106420] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] created_port_ids = self._update_ports_for_instance( [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] with excutils.save_and_reraise_exception(): [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.force_reraise() [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise self.value [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] updated_port = self._update_port( [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.106770] 
env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] _ensure_no_port_binding_failure(port) [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 716.106770] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise exception.PortBindingFailed(port_id=port['id']) [ 716.107948] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. [ 716.107948] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] [ 716.107948] env[61905]: INFO nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Terminating instance [ 716.107948] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquiring lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.247265] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.362577] env[61905]: DEBUG nova.network.neutron [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.446551] env[61905]: DEBUG nova.network.neutron [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.587391] env[61905]: DEBUG oslo_concurrency.lockutils [None req-40d02e32-4945-4e3c-8c43-db3ac53ea2b4 tempest-ServerShowV247Test-741607956 tempest-ServerShowV247Test-741607956-project-member] Lock "b9199119-9d4e-4b04-8675-22f6680da8b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.181s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.951796] env[61905]: DEBUG oslo_concurrency.lockutils [req-1c414f09-04c3-41bd-b1bf-8588fe988706 req-7135f728-5911-4b56-8f70-228318273797 service nova] Releasing lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.951796] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquired lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.951796] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.494707] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.551600] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceea6256-dd14-4147-9e4c-b3fb9e91e207 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.560163] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9511a6cd-11ab-4f30-b109-46f750319e9d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.593977] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0243598e-0eee-4ae6-8ba7-dff12fb551a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.601885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824982c8-c7b3-43af-8a0b-e74a6d102d33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.615595] env[61905]: DEBUG nova.compute.provider_tree [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.618054] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.878156] env[61905]: DEBUG nova.compute.manager [req-bec6129c-4b34-4975-b96a-9b64b72ddca9 req-10045ce2-8e3f-4e85-8d2c-fe7510bfb73b service nova] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Received event network-vif-deleted-c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 718.122433] env[61905]: DEBUG nova.scheduler.client.report [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 718.125829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Releasing lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.126226] env[61905]: DEBUG nova.compute.manager [None 
req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 718.126416] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 718.126931] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a77e3828-ed74-4c08-ad99-5d20d064eb60 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.136224] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6940bae3-ab19-4470-983b-0d1dd6290df9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.158456] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef6e5c2c-1778-4079-ae35-55f9264a060d could not be found. [ 718.158659] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 718.158833] env[61905]: INFO nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 718.159081] env[61905]: DEBUG oslo.service.loopingcall [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 718.159290] env[61905]: DEBUG nova.compute.manager [-] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.159386] env[61905]: DEBUG nova.network.neutron [-] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 718.172641] env[61905]: DEBUG nova.network.neutron [-] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 718.627937] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.628545] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 718.632278] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.440s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.632498] env[61905]: DEBUG nova.objects.instance [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lazy-loading 'resources' on Instance uuid 84428003-72b1-467a-baf5-06ac37205622 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 718.675044] env[61905]: DEBUG nova.network.neutron [-] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.134217] env[61905]: DEBUG nova.compute.utils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 719.138264] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 719.138439] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.177971] env[61905]: INFO nova.compute.manager [-] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Took 1.02 seconds to deallocate network for instance. 
[ 719.180072] env[61905]: DEBUG nova.compute.claims [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 719.180851] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.188832] env[61905]: DEBUG nova.policy [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '745fac95ee5c4e2bb990afe433e9cc73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1682cd0ceccc412ea3313e8b1727aadf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 719.507569] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Successfully created port: 5455749f-be47-4556-add4-b7cf3cd4822a {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.548096] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58dc42e-03b2-4dee-b0bd-337b0479051f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.557219] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86fac99-525e-49ac-bc99-5bf4a275f894 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.592893] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60898db2-f1c7-4300-8ba8-e54a66724cc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.601353] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b3c793-548d-459b-82cd-7b2e73fcb68b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.617010] env[61905]: DEBUG nova.compute.provider_tree [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.641144] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 
tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 720.120358] env[61905]: DEBUG nova.scheduler.client.report [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 720.331120] env[61905]: DEBUG nova.compute.manager [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Received event network-changed-5455749f-be47-4556-add4-b7cf3cd4822a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 720.331316] env[61905]: DEBUG nova.compute.manager [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Refreshing instance network info cache due to event network-changed-5455749f-be47-4556-add4-b7cf3cd4822a. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 720.331529] env[61905]: DEBUG oslo_concurrency.lockutils [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] Acquiring lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.331668] env[61905]: DEBUG oslo_concurrency.lockutils [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] Acquired lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.332372] env[61905]: DEBUG nova.network.neutron [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Refreshing network info cache for port 5455749f-be47-4556-add4-b7cf3cd4822a {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 720.494650] env[61905]: ERROR nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. 
[ 720.494650] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.494650] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.494650] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.494650] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.494650] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.494650] env[61905]: ERROR nova.compute.manager raise self.value [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.494650] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 720.494650] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.494650] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 720.495413] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.495413] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 720.495413] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. 
[ 720.495413] env[61905]: ERROR nova.compute.manager [ 720.495413] env[61905]: Traceback (most recent call last): [ 720.495413] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 720.495413] env[61905]: listener.cb(fileno) [ 720.495413] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.495413] env[61905]: result = function(*args, **kwargs) [ 720.495413] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.495413] env[61905]: return func(*args, **kwargs) [ 720.495413] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.495413] env[61905]: raise e [ 720.495413] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.495413] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 720.495413] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.495413] env[61905]: created_port_ids = self._update_ports_for_instance( [ 720.495413] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.495413] env[61905]: with excutils.save_and_reraise_exception(): [ 720.495413] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.495413] env[61905]: self.force_reraise() [ 720.495413] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.495413] env[61905]: raise self.value [ 720.495413] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.495413] env[61905]: updated_port = self._update_port( [ 720.495413] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.495413] env[61905]: _ensure_no_port_binding_failure(port) [ 720.495413] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.495413] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 720.496637] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. 
[ 720.496637] env[61905]: Removing descriptor: 17 [ 720.624936] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.993s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.627782] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.739s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.648810] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 720.651549] env[61905]: INFO nova.scheduler.client.report [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Deleted allocations for instance 84428003-72b1-467a-baf5-06ac37205622 [ 720.677573] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 720.678534] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 720.678534] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 720.678534] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Flavor pref 0:0:0 
{{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 720.678534] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 720.678716] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 720.678896] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 720.679086] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 720.679303] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 720.679573] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 720.679794] env[61905]: DEBUG nova.virt.hardware [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 720.680758] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e16a6d-7ed1-4610-81c4-9cea812bc49f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.689992] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625998a5-e0b0-4306-acb9-95b389280899 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.706039] env[61905]: ERROR nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 
5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Traceback (most recent call last): [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] yield resources [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.driver.spawn(context, instance, image_meta, [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self._vmops.spawn(context, instance, image_meta, injected_files, [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] vm_ref = self.build_virtual_machine(instance, [ 720.706039] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] vif_infos = vmwarevif.get_vif_info(self._session, [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] for vif in network_info: [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return self._sync_wrapper(fn, *args, **kwargs) [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.wait() [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self[:] = self._gt.wait() [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return self._exit_event.wait() [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 720.706502] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] current.throw(*self._exc) [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] result = function(*args, **kwargs) [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return func(*args, **kwargs) [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise e [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] nwinfo = self.network_api.allocate_for_instance( [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] created_port_ids = self._update_ports_for_instance( [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] with excutils.save_and_reraise_exception(): [ 720.707048] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.force_reraise() [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise self.value [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] updated_port = self._update_port( [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] _ensure_no_port_binding_failure(port) [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 
8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise exception.PortBindingFailed(port_id=port['id']) [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. [ 720.707423] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] [ 720.707423] env[61905]: INFO nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Terminating instance [ 720.708730] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquiring lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.849362] env[61905]: DEBUG nova.network.neutron [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 720.938197] env[61905]: DEBUG nova.network.neutron [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.159799] env[61905]: DEBUG oslo_concurrency.lockutils [None req-391a1a76-9583-4bd8-8cfd-de03297253d8 tempest-ServersAaction247Test-823719857 tempest-ServersAaction247Test-823719857-project-member] Lock "84428003-72b1-467a-baf5-06ac37205622" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 26.707s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.442034] env[61905]: DEBUG oslo_concurrency.lockutils [req-77a2e338-ba2c-4b6d-a94e-69ea99183f86 req-f833b89a-c0c9-459f-884c-977b4a266e88 service nova] Releasing lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 721.442034] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquired lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.442034] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Building network info cache for instance {{(pid=61905) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 721.487611] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4410578d-d032-4733-91dd-d9a2cda3c34a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.495382] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8378a5-ca7c-4b77-bd5d-30682c75626d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.524942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b041212-5e1a-4d77-8225-37cc74d110da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.534211] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3caa71-44fc-4902-bc63-1ed4a67a023b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.547435] env[61905]: DEBUG nova.compute.provider_tree [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.961172] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.050329] env[61905]: DEBUG nova.scheduler.client.report [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.054399] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.359935] env[61905]: DEBUG nova.compute.manager [req-cb1572ad-4956-4f95-a326-6ee363dc8ff9 req-b45dd34f-381c-425b-8def-843fa7bca7c8 service nova] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Received event network-vif-deleted-5455749f-be47-4556-add4-b7cf3cd4822a {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 722.557691] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.558624] env[61905]: ERROR nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. 
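The inventory payload logged above also gives a quick capacity sanity check: placement treats usable capacity per resource class as (total - reserved) * allocation_ratio, so this node over-commits to 192 VCPUs while exposing 196078 MB of RAM. A small arithmetic check under that assumption:

    # Usable capacity per resource class, from the logged inventory data.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0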
[ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Traceback (most recent call last): [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.driver.spawn(context, instance, image_meta, [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] vm_ref = self.build_virtual_machine(instance, [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.558624] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] for vif in network_info: [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return self._sync_wrapper(fn, *args, **kwargs) [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.wait() [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self[:] = self._gt.wait() [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return self._exit_event.wait() [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] current.throw(*self._exc) [ 722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
722.558967] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] result = function(*args, **kwargs) [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] return func(*args, **kwargs) [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise e [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] nwinfo = self.network_api.allocate_for_instance( [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] created_port_ids = self._update_ports_for_instance( [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] with excutils.save_and_reraise_exception(): [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] self.force_reraise() [ 722.559597] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise self.value [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] updated_port = self._update_port( [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] _ensure_no_port_binding_failure(port) [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] raise exception.PortBindingFailed(port_id=port['id']) [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] nova.exception.PortBindingFailed: Binding failed for 
port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. [ 722.560167] env[61905]: ERROR nova.compute.manager [instance: 7e393163-cd68-4de2-8051-7ec10415e508] [ 722.560167] env[61905]: DEBUG nova.compute.utils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 722.561942] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.669s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.561942] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.562095] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 722.562288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.218s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.563749] env[61905]: INFO nova.compute.claims [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.566401] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Releasing lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.566764] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 722.566961] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.567631] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Build of instance 7e393163-cd68-4de2-8051-7ec10415e508 was re-scheduled: Binding failed for port 1700f4ea-6978-4247-ade7-87777bf566df, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 722.569357] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 722.569357] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.569357] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.569357] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.569967] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c056a6af-34fe-4a45-baa9-dd9289c1bc28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.572877] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f443a0e-c866-457d-a968-9363f7929834 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.581028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a9132e-76c6-485f-b5fc-fb6c9e8610eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.587661] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc3a9ba-08eb-4a0b-8a90-2828eef289fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
722.610896] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f030607b-1394-45f7-9a14-09597a412326 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.613538] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d2cb485-32da-4fe7-8462-d98c071a6310 could not be found. [ 722.613741] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 722.613900] env[61905]: INFO nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Took 0.05 seconds to destroy the instance on the hypervisor. [ 722.614142] env[61905]: DEBUG oslo.service.loopingcall [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 722.614352] env[61905]: DEBUG nova.compute.manager [-] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 722.614443] env[61905]: DEBUG nova.network.neutron [-] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.621572] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3e42f2-17d0-4f10-8c76-58cad036f77a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.653027] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181365MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 722.653170] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.654032] env[61905]: DEBUG nova.network.neutron [-] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.100631] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.156302] env[61905]: DEBUG nova.network.neutron [-] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.179372] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.659103] env[61905]: INFO nova.compute.manager [-] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Took 1.04 seconds to deallocate network for instance. [ 723.661260] env[61905]: DEBUG nova.compute.claims [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 723.661436] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.681995] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-7e393163-cd68-4de2-8051-7ec10415e508" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.682230] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 723.682404] env[61905]: DEBUG nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.682566] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.698035] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.902605] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedfd070-f0be-480a-9ce3-f4cd8b2af9aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.910230] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4c2ef7-d443-441e-808d-17e24edf29af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.940036] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98942a4-123b-4162-b477-f11e9187ffa5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.947027] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6339ad-c7a1-4ccf-891b-6bc46b16b795 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.960295] env[61905]: DEBUG nova.compute.provider_tree [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.201633] env[61905]: DEBUG nova.network.neutron [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.464643] env[61905]: DEBUG nova.scheduler.client.report [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.705182] env[61905]: INFO nova.compute.manager [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 7e393163-cd68-4de2-8051-7ec10415e508] Took 1.02 seconds to deallocate network for instance. [ 724.971433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.971433] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 724.973560] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.448s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.974455] env[61905]: INFO nova.compute.claims [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.479367] env[61905]: DEBUG nova.compute.utils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.483044] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 725.483044] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.534707] env[61905]: DEBUG nova.policy [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1585520514c74fc687d343257f5d67c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b23177e0ffc34ebe982c6cb169394409', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 725.742880] env[61905]: INFO nova.scheduler.client.report [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocations for instance 7e393163-cd68-4de2-8051-7ec10415e508 [ 725.886530] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Successfully created port: b91af5df-97a5-4bae-bd1d-5b742bc89c07 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.984498] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 726.250878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-869baa85-f6da-42d7-880c-767c19d21e9f tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "7e393163-cd68-4de2-8051-7ec10415e508" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.802s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.396916] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ed850d-1a27-4922-a1a6-1d3e7527596f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.409923] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27b245c-a942-41e2-adb9-c018dc2f6c19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.445135] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f510b19-56cc-4754-ad51-10dba21e2af2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.452849] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fd6df5-8d63-4669-9257-17a0ae471b67 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.474603] env[61905]: DEBUG nova.compute.provider_tree [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.494181] env[61905]: INFO nova.virt.block_device [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Booting with volume 063a2314-3f6f-4bc2-8d51-8d966a9d7c6b at /dev/sda [ 726.539106] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7934ab69-ae07-4c75-977f-def29727eecf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.547742] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a41eb6-d3ce-4808-9d72-0865e570253b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.575264] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27d6040f-6180-4237-ac58-0f669a6ee2ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.581090] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66252b2-399d-4b57-a4a0-0c5897f213dd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.606920] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3b2d3aac-4999-45a6-bbed-e3c32de49918 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.611126] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19482391-3b4d-44e2-b305-29c4feffdd90 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.623435] env[61905]: DEBUG nova.virt.block_device [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating existing volume attachment record: f377c147-4202-4f00-9ff6-3731963b2870 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 726.754366] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 726.977981] env[61905]: DEBUG nova.scheduler.client.report [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.032919] env[61905]: DEBUG nova.compute.manager [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Received event network-changed-b91af5df-97a5-4bae-bd1d-5b742bc89c07 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.032919] env[61905]: DEBUG nova.compute.manager [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Refreshing instance network info cache due to event network-changed-b91af5df-97a5-4bae-bd1d-5b742bc89c07. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 727.034265] env[61905]: DEBUG oslo_concurrency.lockutils [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] Acquiring lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.034265] env[61905]: DEBUG oslo_concurrency.lockutils [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] Acquired lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.034265] env[61905]: DEBUG nova.network.neutron [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Refreshing network info cache for port b91af5df-97a5-4bae-bd1d-5b742bc89c07 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.257629] env[61905]: ERROR nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. [ 727.257629] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.257629] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.257629] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.257629] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.257629] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.257629] env[61905]: ERROR nova.compute.manager raise self.value [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.257629] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 727.257629] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.257629] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 727.258121] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.258121] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 727.258121] env[61905]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. [ 727.258121] env[61905]: ERROR nova.compute.manager [ 727.258121] env[61905]: Traceback (most recent call last): [ 727.258121] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 727.258121] env[61905]: listener.cb(fileno) [ 727.258121] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.258121] env[61905]: result = function(*args, **kwargs) [ 727.258121] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 727.258121] env[61905]: return func(*args, **kwargs) [ 727.258121] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 727.258121] env[61905]: raise e [ 727.258121] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 727.258121] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 727.258121] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.258121] env[61905]: created_port_ids = self._update_ports_for_instance( [ 727.258121] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 727.258121] env[61905]: with excutils.save_and_reraise_exception(): [ 727.258121] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.258121] env[61905]: self.force_reraise() [ 727.258121] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.258121] env[61905]: raise self.value [ 727.258121] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.258121] env[61905]: updated_port = self._update_port( [ 727.258121] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.258121] env[61905]: _ensure_no_port_binding_failure(port) [ 727.258121] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.258121] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 727.259188] env[61905]: nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. 
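Both traceback copies above terminate in _ensure_no_port_binding_failure() at nova/network/neutron.py:294. What follows is a minimal, self-contained sketch of that kind of check, not the verbatim Nova source (the real helper lives in nova.network.neutron and raises nova.exception.PortBindingFailed): after Nova updates a port, Neutron reports the binding outcome on the port's binding:vif_type attribute, and the sentinel value 'binding_failed' is what turns an otherwise successful-looking port update into the exception seen here.

```python
# Sketch of the port-binding check behind the traceback above.
# Names are simplified stand-ins for the Nova/Neutron equivalents.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reported a failed binding for this port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example port payload of the shape Neutron returns:
port = {'id': 'b91af5df-97a5-4bae-bd1d-5b742bc89c07',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as e:
    print(e)  # matches the message logged above
```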
[ 727.259188] env[61905]: Removing descriptor: 17 [ 727.273053] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.430730] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.430963] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.485608] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.486291] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 727.488701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.021s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.490048] env[61905]: INFO nova.compute.claims [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.556279] env[61905]: DEBUG nova.network.neutron [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.634882] env[61905]: DEBUG nova.network.neutron [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.994792] env[61905]: DEBUG nova.compute.utils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.997655] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 727.997828] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 728.042210] env[61905]: DEBUG nova.policy [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed3c528fd7e14bae99a9ff4dc7fedb31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e07b5c2bda047b5a7045d0ae5263d6a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 728.137684] env[61905]: DEBUG oslo_concurrency.lockutils [req-a78231d9-a0f9-49d2-bf72-0acf6c70da3b req-0e4880e5-050b-4010-8e8e-83776abb8e8a service nova] Releasing lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.296805] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Successfully created port: 8a7a926c-a420-4fb7-9add-5294d3e29711 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.512948] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 728.723730] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 728.724243] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 728.724243] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 728.724243] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.724451] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 728.724451] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.724591] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 728.724780] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 728.725476] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 
tempest-ServerActionsV293TestJSON-1171886185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 728.725476] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 728.725476] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 728.725645] env[61905]: DEBUG nova.virt.hardware [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.726566] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8912357-22eb-418f-9cde-90681ddfb774 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.737332] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1015bb46-66f4-4187-8d31-b12abf448232 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.754569] env[61905]: ERROR nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. 
[ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Traceback (most recent call last): [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] yield resources [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.driver.spawn(context, instance, image_meta, [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] vm_ref = self.build_virtual_machine(instance, [ 728.754569] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] for vif in network_info: [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return self._sync_wrapper(fn, *args, **kwargs) [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.wait() [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self[:] = self._gt.wait() [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return self._exit_event.wait() [ 728.755087] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 728.755087] env[61905]: ERROR 
nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] current.throw(*self._exc) [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] result = function(*args, **kwargs) [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return func(*args, **kwargs) [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise e [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] nwinfo = self.network_api.allocate_for_instance( [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] created_port_ids = self._update_ports_for_instance( [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] with excutils.save_and_reraise_exception(): [ 728.756018] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.force_reraise() [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise self.value [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] updated_port = self._update_port( [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] _ensure_no_port_binding_failure(port) [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise exception.PortBindingFailed(port_id=port['id']) [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. [ 728.756583] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] [ 728.756583] env[61905]: INFO nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Terminating instance [ 728.757205] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquiring lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.757205] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquired lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.757315] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.910051] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1be86a-298f-4db5-adb1-83d962826449 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.919111] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72603f8-6b61-402f-9295-c11831d177ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.951743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ddc1a8-d84a-42d2-8a01-9560bca09f44 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.959367] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5e0906-a5cd-45b0-a78b-795007c7ff34 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.972915] env[61905]: DEBUG nova.compute.provider_tree [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.061906] env[61905]: DEBUG nova.compute.manager [req-e6420e04-4bcf-4d1e-bf70-d055e4054a98 req-89f0207d-0202-44c7-865e-40aa85a902c7 service nova] [instance: 
6783654c-4f87-4353-b9ba-1299158eba3a] Received event network-vif-deleted-b91af5df-97a5-4bae-bd1d-5b742bc89c07 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 729.282878] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.357401] env[61905]: ERROR nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. [ 729.357401] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.357401] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.357401] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.357401] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.357401] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.357401] env[61905]: ERROR nova.compute.manager raise self.value [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.357401] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 729.357401] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.357401] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 729.358066] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.358066] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 729.358066] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. 
[ 729.358066] env[61905]: ERROR nova.compute.manager [ 729.358066] env[61905]: Traceback (most recent call last): [ 729.358066] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 729.358066] env[61905]: listener.cb(fileno) [ 729.358066] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.358066] env[61905]: result = function(*args, **kwargs) [ 729.358066] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.358066] env[61905]: return func(*args, **kwargs) [ 729.358066] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.358066] env[61905]: raise e [ 729.358066] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.358066] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 729.358066] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.358066] env[61905]: created_port_ids = self._update_ports_for_instance( [ 729.358066] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.358066] env[61905]: with excutils.save_and_reraise_exception(): [ 729.358066] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.358066] env[61905]: self.force_reraise() [ 729.358066] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.358066] env[61905]: raise self.value [ 729.358066] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.358066] env[61905]: updated_port = self._update_port( [ 729.358066] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.358066] env[61905]: _ensure_no_port_binding_failure(port) [ 729.358066] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 729.358066] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 729.359296] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. 
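Both failures in this section follow the same two-phase shape: the exception is raised first inside the background network-allocation greenthread (_allocate_network_async, the raw eventlet traceback), and is then re-raised when the spawn path first iterates the async network_info wrapper (the _sync_wrapper/wait frames in nova/network/model.py). Below is a rough model of that deferred-failure pattern, using a concurrent.futures Future in place of Nova's eventlet greenthread; all names are illustrative, not Nova's API.

```python
# Model of the deferred network-allocation failure seen twice above:
# once in the worker, once when the result is first consumed by spawn().

from concurrent.futures import ThreadPoolExecutor


class PortBindingFailed(Exception):
    pass


def allocate_for_instance():
    # Stand-in for the Neutron allocation call; fails like the log shows.
    raise PortBindingFailed("Binding failed for port 8a7a926c-...")


class NetworkInfoAsync:
    """Looks like a list of VIFs, but resolves a background job on first use."""

    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # result() re-raises PortBindingFailed if the background job failed,
        # which is why the same error surfaces again inside driver.spawn().
        return iter(self._future.result())


with ThreadPoolExecutor(max_workers=1) as pool:
    network_info = NetworkInfoAsync(pool.submit(allocate_for_instance))
    try:
        for vif in network_info:   # the spawn() path touches the wrapper here
            pass
    except PortBindingFailed as e:
        print(f"Instance failed to spawn: {e}")
```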
[ 729.359296] env[61905]: Removing descriptor: 17 [ 729.385074] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.477035] env[61905]: DEBUG nova.scheduler.client.report [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.522190] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 729.544735] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 729.544977] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 729.545152] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.545338] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 729.545482] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.545690] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 729.545927] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 729.546097] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 729.546264] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 729.546423] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 729.546586] env[61905]: DEBUG nova.virt.hardware [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 729.547509] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c456f4b7-8e44-43c9-94f7-2eea285b2afc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.555454] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7374eb47-bb7c-42f2-8bd6-503682cdd264 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.569748] env[61905]: ERROR nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. 
[ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Traceback (most recent call last): [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] yield resources [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.driver.spawn(context, instance, image_meta, [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] vm_ref = self.build_virtual_machine(instance, [ 729.569748] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] vif_infos = vmwarevif.get_vif_info(self._session, [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] for vif in network_info: [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return self._sync_wrapper(fn, *args, **kwargs) [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.wait() [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self[:] = self._gt.wait() [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return self._exit_event.wait() [ 729.570170] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 729.570170] env[61905]: ERROR 
nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] current.throw(*self._exc) [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] result = function(*args, **kwargs) [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return func(*args, **kwargs) [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise e [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] nwinfo = self.network_api.allocate_for_instance( [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] created_port_ids = self._update_ports_for_instance( [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] with excutils.save_and_reraise_exception(): [ 729.570578] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.force_reraise() [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise self.value [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] updated_port = self._update_port( [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] _ensure_no_port_binding_failure(port) [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise exception.PortBindingFailed(port_id=port['id']) [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. [ 729.570993] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] [ 729.570993] env[61905]: INFO nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Terminating instance [ 729.572049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquiring lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.572211] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquired lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.572372] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.887906] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Releasing lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.888662] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 729.888944] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35c895a3-1c89-4b42-bf65-22b192586822 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.898954] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be4137f-836d-4c39-9c94-49e8dc27207d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.920292] env[61905]: WARNING nova.virt.vmwareapi.driver [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance does not exists. 
Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 6783654c-4f87-4353-b9ba-1299158eba3a could not be found. [ 729.920500] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.920756] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cc97db2-74ea-4cc0-b361-955638ce4cdb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.928179] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d7ae73-646c-4cb2-a694-b4a03ead4031 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.949342] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6783654c-4f87-4353-b9ba-1299158eba3a could not be found. [ 729.949539] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.949717] env[61905]: INFO nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Took 0.06 seconds to destroy the instance on the hypervisor. [ 729.949947] env[61905]: DEBUG oslo.service.loopingcall [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.950183] env[61905]: DEBUG nova.compute.manager [-] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.950295] env[61905]: DEBUG nova.network.neutron [-] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.964818] env[61905]: DEBUG nova.network.neutron [-] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.981677] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.982190] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 729.984573] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.878s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.089893] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.175583] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.467839] env[61905]: DEBUG nova.network.neutron [-] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.489303] env[61905]: DEBUG nova.compute.utils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 730.494126] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 730.494126] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.535224] env[61905]: DEBUG nova.policy [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607efe9546ea4925b40425a536d6ae81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9c343ce0dc043d4b39b04dc6bdc70aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 730.678033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Releasing lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.678529] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 730.678735] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 730.681446] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c41f462-dad2-4986-a129-55031c22dc71 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.690863] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74cc98b1-a296-416b-be27-e632d671e579 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.716196] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24f832e6-9704-4105-a17f-c5c77fa52d74 could not be found. 
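The teardown that follows is deliberately tolerant: spawn failed before any VM was created, so the SearchIndex.FindAllByUuid lookup comes back empty, the driver logs InstanceNotFound as a warning rather than an error, reports the instance destroyed anyway, and the compute manager still proceeds to deallocate networking. A sketch of that shape is below; the session methods are hypothetical stand-ins for the oslo.vmware invocations in the log, not real driver APIs.

```python
# Sketch of the tolerant destroy path seen above: a VM that never
# materialized on the backend is treated as already destroyed.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("vmops")


class FakeSession:
    """Hypothetical stand-in for the vCenter API session."""

    def find_vm_by_uuid(self, uuid):
        return None  # vCenter has no such VM: spawn never got that far

    def destroy_vm(self, vm_ref):
        raise AssertionError("not reached in this scenario")


def destroy(session, instance_uuid):
    vm_ref = session.find_vm_by_uuid(instance_uuid)  # ~ FindAllByUuid
    if vm_ref is None:
        # Matches the WARNING records above: absence is tolerated, not fatal.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    else:
        session.destroy_vm(vm_ref)
    LOG.debug("Instance destroyed")
    # The compute manager then continues with network deallocation,
    # producing the "Deallocating network for instance" records that follow.


destroy(FakeSession(), "24f832e6-9704-4105-a17f-c5c77fa52d74")
```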
[ 730.716475] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.716687] env[61905]: INFO nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Took 0.04 seconds to destroy the instance on the hypervisor. [ 730.716975] env[61905]: DEBUG oslo.service.loopingcall [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.719574] env[61905]: DEBUG nova.compute.manager [-] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 730.719725] env[61905]: DEBUG nova.network.neutron [-] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 730.741494] env[61905]: DEBUG nova.network.neutron [-] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 730.887974] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Successfully created port: 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.891761] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f125d7-b066-48e2-b69b-6d8a21100701 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.899247] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8209465-6c61-4f4b-bfe0-c25dc74de1d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.931030] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfa8ad4-877d-4d4d-bf95-e5eb6d30e831 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.939383] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c0927c-8eb9-4e8a-a4f9-1e8480c3c598 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.952312] env[61905]: DEBUG nova.compute.provider_tree [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905)
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.970448] env[61905]: INFO nova.compute.manager [-] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Took 1.02 seconds to deallocate network for instance. [ 730.994629] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 731.117329] env[61905]: DEBUG nova.compute.manager [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Received event network-changed-8a7a926c-a420-4fb7-9add-5294d3e29711 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 731.117329] env[61905]: DEBUG nova.compute.manager [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Refreshing instance network info cache due to event network-changed-8a7a926c-a420-4fb7-9add-5294d3e29711. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 731.117329] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] Acquiring lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.117329] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] Acquired lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.117329] env[61905]: DEBUG nova.network.neutron [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Refreshing network info cache for port 8a7a926c-a420-4fb7-9add-5294d3e29711 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 731.244560] env[61905]: DEBUG nova.network.neutron [-] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.455993] env[61905]: DEBUG nova.scheduler.client.report [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.534676] env[61905]: INFO nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 
tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Took 0.56 seconds to detach 1 volumes for instance. [ 731.537192] env[61905]: DEBUG nova.compute.claims [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.537386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.641306] env[61905]: DEBUG nova.network.neutron [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.752090] env[61905]: INFO nova.compute.manager [-] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Took 1.03 seconds to deallocate network for instance. [ 731.758149] env[61905]: DEBUG nova.compute.claims [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.758149] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.769681] env[61905]: DEBUG nova.network.neutron [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.919353] env[61905]: ERROR nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. 
[ 731.919353] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.919353] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.919353] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.919353] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.919353] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.919353] env[61905]: ERROR nova.compute.manager raise self.value [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.919353] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 731.919353] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.919353] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 731.920233] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.920233] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 731.920233] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. 
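The traceback above bottoms out in _ensure_no_port_binding_failure in nova/network/neutron.py: after Neutron updates the port, Nova inspects the returned binding and converts a failed binding into PortBindingFailed, which is what aborts the build. Below is a simplified reconstruction of that check, under the assumption (consistent with Nova's behavior) that a failed binding comes back with binding:vif_type set to 'binding_failed'; it is a sketch, not the verbatim source.

```python
# Simplified reconstruction of the check that raises in the traceback above.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # A port whose Neutron binding failed reports the sentinel vif_type;
    # Nova turns that into an exception rather than spawning a broken VM.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The failing port from the log would look roughly like this:
try:
    ensure_no_port_binding_failure({
        'id': '6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR records above
```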
[ 731.920233] env[61905]: ERROR nova.compute.manager [ 731.920233] env[61905]: Traceback (most recent call last): [ 731.920233] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 731.920233] env[61905]: listener.cb(fileno) [ 731.920233] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.920233] env[61905]: result = function(*args, **kwargs) [ 731.920233] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.920233] env[61905]: return func(*args, **kwargs) [ 731.920233] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.920233] env[61905]: raise e [ 731.920233] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.920233] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 731.920233] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.920233] env[61905]: created_port_ids = self._update_ports_for_instance( [ 731.920233] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.920233] env[61905]: with excutils.save_and_reraise_exception(): [ 731.920233] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.920233] env[61905]: self.force_reraise() [ 731.920233] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.920233] env[61905]: raise self.value [ 731.920233] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.920233] env[61905]: updated_port = self._update_port( [ 731.920233] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.920233] env[61905]: _ensure_no_port_binding_failure(port) [ 731.920233] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.920233] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 731.921137] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. [ 731.921137] env[61905]: Removing descriptor: 18 [ 731.962703] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.978s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.963341] env[61905]: ERROR nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. 
[ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Traceback (most recent call last): [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.driver.spawn(context, instance, image_meta, [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] vm_ref = self.build_virtual_machine(instance, [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.963341] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] for vif in network_info: [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self._sync_wrapper(fn, *args, **kwargs) [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.wait() [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self[:] = self._gt.wait() [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self._exit_event.wait() [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] result = hub.switch() [ 731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
731.963696] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return self.greenlet.switch() [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] result = function(*args, **kwargs) [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] return func(*args, **kwargs) [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise e [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] nwinfo = self.network_api.allocate_for_instance( [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] created_port_ids = self._update_ports_for_instance( [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] with excutils.save_and_reraise_exception(): [ 731.964100] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] self.force_reraise() [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise self.value [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] updated_port = self._update_port( [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] _ensure_no_port_binding_failure(port) [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] raise exception.PortBindingFailed(port_id=port['id']) [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] nova.exception.PortBindingFailed: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. [ 731.964489] env[61905]: ERROR nova.compute.manager [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] [ 731.964829] env[61905]: DEBUG nova.compute.utils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 731.965240] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.216s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.968370] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Build of instance c7e66b30-f72d-4afd-aded-4a92dd19b388 was re-scheduled: Binding failed for port df3f464d-8204-4dcf-aa3e-bdb12757450c, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 731.968591] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 731.968776] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquiring lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.968917] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Acquired lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.969084] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.004964] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 732.027541] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 732.027781] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 732.027927] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.028127] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 732.028298] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.028442] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 732.028646] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 732.028800] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 732.028958] env[61905]: DEBUG nova.virt.hardware [None 
req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 732.029160] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 732.029342] env[61905]: DEBUG nova.virt.hardware [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.030588] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5ceed7-8c69-4e2d-a964-09eea4358d49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.037946] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eee4e9-4365-4aa3-b3d3-bcb5e9b66a4b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.051168] env[61905]: ERROR nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. 
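The nova.virt.hardware lines above trace CPU-topology selection for the 1-vCPU m1.nano flavor: with flavor and image limits and preferences all 0:0:0, the limits default to 65536 per dimension, and only one factorization of a single vCPU exists, hence "Got 1 possible topologies" and the sorted result [VirtCPUTopology(cores=1,sockets=1,threads=1)]. A toy enumeration that reproduces the count is shown below; Nova's actual algorithm additionally applies preference ordering, so this is only an illustration.

```python
# Toy reconstruction: enumerate (sockets, cores, threads) factorizations of
# the vCPU count within the default limits, as in the log lines above.
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            yield (s, c, t)


# For 1 vCPU there is exactly one split, matching "Got 1 possible topologies".
print(list(possible_topologies(1)))  # [(1, 1, 1)]
```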
[ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Traceback (most recent call last): [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] yield resources [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.driver.spawn(context, instance, image_meta, [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] vm_ref = self.build_virtual_machine(instance, [ 732.051168] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] for vif in network_info: [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return self._sync_wrapper(fn, *args, **kwargs) [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.wait() [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self[:] = self._gt.wait() [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return self._exit_event.wait() [ 732.051647] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 732.051647] env[61905]: ERROR 
nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] current.throw(*self._exc) [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] result = function(*args, **kwargs) [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return func(*args, **kwargs) [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise e [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] nwinfo = self.network_api.allocate_for_instance( [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] created_port_ids = self._update_ports_for_instance( [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] with excutils.save_and_reraise_exception(): [ 732.052090] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.force_reraise() [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise self.value [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] updated_port = self._update_port( [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] _ensure_no_port_binding_failure(port) [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise exception.PortBindingFailed(port_id=port['id']) [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. [ 732.052496] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] [ 732.052496] env[61905]: INFO nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Terminating instance [ 732.053418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.053560] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.053721] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.271980] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] Releasing lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.272243] env[61905]: DEBUG nova.compute.manager [req-e5677dd3-42fa-467d-b2fc-b2703d047825 req-27fc9ade-d665-413c-86d6-f05f4207e896 service nova] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Received event network-vif-deleted-8a7a926c-a420-4fb7-9add-5294d3e29711 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 732.491376] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.583131] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.618448] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.679720] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.828948] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdec349e-2708-4352-b114-fab28bfccf8f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.837033] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5857b7-72d9-46d4-9ba3-baba774fec31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.867395] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d40fc3d-f52a-45a1-b770-51c5ae2037e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.874783] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493114a5-f3c1-4fc1-a3eb-58df6935d836 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.887654] env[61905]: DEBUG nova.compute.provider_tree [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.121239] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Releasing lock "refresh_cache-c7e66b30-f72d-4afd-aded-4a92dd19b388" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.121571] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 733.121688] env[61905]: DEBUG nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.121827] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.137357] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.140817] env[61905]: DEBUG nova.compute.manager [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Received event network-changed-6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.140985] env[61905]: DEBUG nova.compute.manager [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Refreshing instance network info cache due to event network-changed-6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 733.141338] env[61905]: DEBUG oslo_concurrency.lockutils [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] Acquiring lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.184195] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.184620] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 733.184805] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.185111] env[61905]: DEBUG oslo_concurrency.lockutils [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] Acquired lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.185723] env[61905]: DEBUG nova.network.neutron [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Refreshing network info cache for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.187461] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-481783a9-3040-425b-8577-14de12e50dd7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.196285] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03861c7-4008-4c34-b34f-9a7b351f2864 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.217454] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5bdd7f80-e321-475f-8132-7047a7f24c75 could not be found. [ 733.217660] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.217830] env[61905]: INFO nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Took 0.03 seconds to destroy the instance on the hypervisor. [ 733.218076] env[61905]: DEBUG oslo.service.loopingcall [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.218280] env[61905]: DEBUG nova.compute.manager [-] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.218368] env[61905]: DEBUG nova.network.neutron [-] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.234438] env[61905]: DEBUG nova.network.neutron [-] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.390624] env[61905]: DEBUG nova.scheduler.client.report [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.642499] env[61905]: DEBUG nova.network.neutron [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.704755] env[61905]: DEBUG nova.network.neutron [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.737048] env[61905]: DEBUG nova.network.neutron [-] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.786222] env[61905]: DEBUG nova.network.neutron [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.895427] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.896075] env[61905]: ERROR nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Traceback (most recent call last): [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.driver.spawn(context, instance, image_meta, [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] vm_ref = self.build_virtual_machine(instance, [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.896075] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] for vif in network_info: [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.896442] env[61905]: 
ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self._sync_wrapper(fn, *args, **kwargs) [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.wait() [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self[:] = self._gt.wait() [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self._exit_event.wait() [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] result = hub.switch() [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.896442] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return self.greenlet.switch() [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] result = function(*args, **kwargs) [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] return func(*args, **kwargs) [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise e [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] nwinfo = self.network_api.allocate_for_instance( [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] created_port_ids = self._update_ports_for_instance( [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 733.896828] env[61905]: ERROR nova.compute.manager 
[instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] with excutils.save_and_reraise_exception(): [ 733.896828] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] self.force_reraise() [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise self.value [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] updated_port = self._update_port( [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] _ensure_no_port_binding_failure(port) [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] raise exception.PortBindingFailed(port_id=port['id']) [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] nova.exception.PortBindingFailed: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. [ 733.897233] env[61905]: ERROR nova.compute.manager [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] [ 733.897601] env[61905]: DEBUG nova.compute.utils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.898082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.119s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.900892] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Build of instance 0a1e2a21-a43d-4363-9f1f-683e35d199aa was re-scheduled: Binding failed for port 281dd8bc-eac0-44e0-8975-9c6f1182642c, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 733.901304] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 733.901526] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.901670] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquired lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.901833] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.144873] env[61905]: INFO nova.compute.manager [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] [instance: c7e66b30-f72d-4afd-aded-4a92dd19b388] Took 1.02 seconds to deallocate network for instance. [ 734.240531] env[61905]: INFO nova.compute.manager [-] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Took 1.02 seconds to deallocate network for instance. 
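[editor's note] Every PortBindingFailed traceback above bottoms out in the same check in nova/network/neutron.py (_ensure_no_port_binding_failure, line 294). The following is a minimal standalone sketch of that check, not the shipped implementation: the exception class and the port dict are simplified stand-ins, and the constant name VIF_TYPE_BINDING_FAILED is assumed to mirror the value Neutron sets on a failed binding.

    # Illustrative stand-ins; the real definitions live in nova.exception
    # and nova/network/neutron.py.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed failure marker

    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the port's binding:vif_type;
        # Nova converts that into the exception seen throughout this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port['id'])

    # Example using the port id from the traceback above.
    port = {'id': '281dd8bc-eac0-44e0-8975-9c6f1182642c',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)
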
[ 734.242743] env[61905]: DEBUG nova.compute.claims [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 734.242911] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.289060] env[61905]: DEBUG oslo_concurrency.lockutils [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] Releasing lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.289294] env[61905]: DEBUG nova.compute.manager [req-75f5c995-420d-4a88-aaaf-5bd42d796084 req-fe37766c-cb66-4541-9ec6-d9fb6e9c2214 service nova] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Received event network-vif-deleted-6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 734.566780] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.653253] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.766486] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b646b4a0-e253-4c93-92e0-3943fafa471e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.774267] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72c5377-391f-4e6b-a08b-749ee967670c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.804030] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2a393b-3f4f-407e-a7b3-1f9cd469d7e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.810635] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81a95c8-f688-4613-9d15-c27b937207ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.823518] env[61905]: DEBUG nova.compute.provider_tree [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed 
in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.155701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Releasing lock "refresh_cache-0a1e2a21-a43d-4363-9f1f-683e35d199aa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.155989] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 735.156102] env[61905]: DEBUG nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 735.156285] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.170517] env[61905]: INFO nova.scheduler.client.report [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Deleted allocations for instance c7e66b30-f72d-4afd-aded-4a92dd19b388 [ 735.178828] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.327155] env[61905]: DEBUG nova.scheduler.client.report [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.679045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-2a097e19-fe19-441c-b66d-d913c972cf95 tempest-ServerPasswordTestJSON-523692241 tempest-ServerPasswordTestJSON-523692241-project-member] Lock "c7e66b30-f72d-4afd-aded-4a92dd19b388" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.752s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.681201] env[61905]: DEBUG nova.network.neutron [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.831707] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.933s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.832390] env[61905]: ERROR nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. 
[ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Traceback (most recent call last): [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.driver.spawn(context, instance, image_meta, [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] vm_ref = self.build_virtual_machine(instance, [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] vif_infos = vmwarevif.get_vif_info(self._session, [ 735.832390] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] for vif in network_info: [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self._sync_wrapper(fn, *args, **kwargs) [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.wait() [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self[:] = self._gt.wait() [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self._exit_event.wait() [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] result = hub.switch() [ 735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
735.832731] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return self.greenlet.switch() [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] result = function(*args, **kwargs) [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] return func(*args, **kwargs) [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise e [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] nwinfo = self.network_api.allocate_for_instance( [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] created_port_ids = self._update_ports_for_instance( [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] with excutils.save_and_reraise_exception(): [ 735.833084] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] self.force_reraise() [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise self.value [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] updated_port = self._update_port( [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] _ensure_no_port_binding_failure(port) [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] raise exception.PortBindingFailed(port_id=port['id']) [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] nova.exception.PortBindingFailed: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. [ 735.833429] env[61905]: ERROR nova.compute.manager [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] [ 735.833771] env[61905]: DEBUG nova.compute.utils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 735.834567] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.587s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.836203] env[61905]: INFO nova.compute.claims [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.838783] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Build of instance bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3 was re-scheduled: Binding failed for port 5cb52e1a-9d18-439a-a392-5f279c39779d, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 735.839214] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 735.839444] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquiring lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.839585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Acquired lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.839741] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.181239] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 736.186116] env[61905]: INFO nova.compute.manager [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: 0a1e2a21-a43d-4363-9f1f-683e35d199aa] Took 1.03 seconds to deallocate network for instance. [ 736.368640] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.451022] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.720420] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.960337] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Releasing lock "refresh_cache-bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.960337] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 736.960337] env[61905]: DEBUG nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 736.960337] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.996811] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.227561] env[61905]: INFO nova.scheduler.client.report [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Deleted allocations for instance 0a1e2a21-a43d-4363-9f1f-683e35d199aa [ 737.324747] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a40b9-0902-424f-8c2c-4b646996ae00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.332491] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f0e856-e056-4a01-944a-2921154f98de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.363132] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a34655-5560-4141-a3fa-e0983484f6ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.370696] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a868646-0984-48c0-9630-362ca90a99b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.384186] env[61905]: DEBUG nova.compute.provider_tree [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.500235] env[61905]: DEBUG nova.network.neutron [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.737686] env[61905]: DEBUG oslo_concurrency.lockutils [None req-634fae66-ac15-4f25-8219-51f08ecdd939 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "0a1e2a21-a43d-4363-9f1f-683e35d199aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.538s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.887596] env[61905]: DEBUG nova.scheduler.client.report [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.002760] 
env[61905]: INFO nova.compute.manager [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] [instance: bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3] Took 1.04 seconds to deallocate network for instance. [ 738.240472] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 738.392949] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.393530] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 738.395998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.216s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.760439] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.903456] env[61905]: DEBUG nova.compute.utils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.904805] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 738.904974] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.953343] env[61905]: DEBUG nova.policy [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '438c5213199a4a54aeff07a731d74ae8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ac83d8a7ad046b585835e3cd1024c2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.032903] env[61905]: INFO nova.scheduler.client.report [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Deleted allocations for instance bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3 [ 739.226789] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e50df6-d200-4aae-a8cc-58a5a02b4c51 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.234898] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5df716-7c3f-465c-bb99-196abf9f4dfd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.264298] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Successfully created port: 38cfc58f-147f-4c07-b58b-0a1924ced7ca {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.266656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46651d4f-bf56-4ecb-8b50-87a166042839 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.274709] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88a7df8-2ddd-4bc2-8480-34cf53254b33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.287632] env[61905]: DEBUG nova.compute.provider_tree [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.410344] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 
tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.542444] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6e4e4c01-0db8-4de5-a6ad-fc741beae869 tempest-ServerRescueNegativeTestJSON-1697858003 tempest-ServerRescueNegativeTestJSON-1697858003-project-member] Lock "bfdeefe9-aa06-4ede-b8db-9b7a00cb89c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.369s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.791368] env[61905]: DEBUG nova.scheduler.client.report [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 740.046787] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 740.080585] env[61905]: DEBUG nova.compute.manager [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Received event network-changed-38cfc58f-147f-4c07-b58b-0a1924ced7ca {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 740.080777] env[61905]: DEBUG nova.compute.manager [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Refreshing instance network info cache due to event network-changed-38cfc58f-147f-4c07-b58b-0a1924ced7ca. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 740.081140] env[61905]: DEBUG oslo_concurrency.lockutils [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] Acquiring lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.081140] env[61905]: DEBUG oslo_concurrency.lockutils [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] Acquired lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.081365] env[61905]: DEBUG nova.network.neutron [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Refreshing network info cache for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 740.205926] env[61905]: ERROR nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. [ 740.205926] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.205926] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.205926] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.205926] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.205926] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.205926] env[61905]: ERROR nova.compute.manager raise self.value [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.205926] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 740.205926] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.205926] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 740.206451] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.206451] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 740.206451] env[61905]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. [ 740.206451] env[61905]: ERROR nova.compute.manager [ 740.206451] env[61905]: Traceback (most recent call last): [ 740.206451] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 740.206451] env[61905]: listener.cb(fileno) [ 740.206451] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.206451] env[61905]: result = function(*args, **kwargs) [ 740.206451] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.206451] env[61905]: return func(*args, **kwargs) [ 740.206451] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.206451] env[61905]: raise e [ 740.206451] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.206451] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 740.206451] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.206451] env[61905]: created_port_ids = self._update_ports_for_instance( [ 740.206451] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.206451] env[61905]: with excutils.save_and_reraise_exception(): [ 740.206451] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.206451] env[61905]: self.force_reraise() [ 740.206451] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.206451] env[61905]: raise self.value [ 740.206451] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.206451] env[61905]: updated_port = self._update_port( [ 740.206451] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.206451] env[61905]: _ensure_no_port_binding_failure(port) [ 740.206451] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.206451] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 740.207359] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. [ 740.207359] env[61905]: Removing descriptor: 17 [ 740.297708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.901s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.298115] env[61905]: ERROR nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. 
[ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Traceback (most recent call last): [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.driver.spawn(context, instance, image_meta, [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] vm_ref = self.build_virtual_machine(instance, [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.298115] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] for vif in network_info: [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self._sync_wrapper(fn, *args, **kwargs) [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.wait() [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self[:] = self._gt.wait() [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self._exit_event.wait() [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] result = hub.switch() [ 740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
740.298476] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return self.greenlet.switch() [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] result = function(*args, **kwargs) [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] return func(*args, **kwargs) [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise e [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] nwinfo = self.network_api.allocate_for_instance( [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] created_port_ids = self._update_ports_for_instance( [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] with excutils.save_and_reraise_exception(): [ 740.298815] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] self.force_reraise() [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise self.value [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] updated_port = self._update_port( [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] _ensure_no_port_binding_failure(port) [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] raise exception.PortBindingFailed(port_id=port['id']) [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] nova.exception.PortBindingFailed: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. [ 740.299419] env[61905]: ERROR nova.compute.manager [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] [ 740.299860] env[61905]: DEBUG nova.compute.utils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.300121] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.647s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.302089] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Build of instance ef6e5c2c-1778-4079-ae35-55f9264a060d was re-scheduled: Binding failed for port c3c0d9b0-a94d-4ed4-86a0-3a3568a43acf, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 740.303110] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 740.303110] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquiring lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.303110] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Acquired lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.303110] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.417407] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 
tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 740.441571] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.442455] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.442455] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.442455] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.442455] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.442455] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.442693] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.442737] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb 
tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.442896] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.443063] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.443232] env[61905]: DEBUG nova.virt.hardware [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.444106] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84ac6e7-9d10-47a5-8e50-dbd66a1290dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.452797] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12118301-1385-40ee-ad5c-f36da7c2e618 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.465804] env[61905]: ERROR nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. 
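[Editor's note] The spawn failure logged above for d4e5eb34-6f16-4920-9f95-7ea8b080084b, like the earlier traceback for ef6e5c2c-1778-4079-ae35-55f9264a060d, bottoms out in _ensure_no_port_binding_failure at nova/network/neutron.py:294 (the full traceback follows below). A minimal sketch of that check, assuming the usual Nova semantics: Neutron accepts the port update but marks a port it could not bind with binding:vif_type = 'binding_failed', and Nova converts that into PortBindingFailed so the build gets re-scheduled.

    # Sketch of the check the tracebacks point at (nova/network/neutron.py:294).
    # Assumes network_model.VIF_TYPE_BINDING_FAILED == 'binding_failed'.
    from nova import exception
    from nova.network import model as network_model

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == network_model.VIF_TYPE_BINDING_FAILED:
            # Neutron accepted the port update but no mechanism driver could
            # bind it on this host; surface that as a hard build failure.
            raise exception.PortBindingFailed(port_id=port['id'])

The "please check neutron logs for more information" wording is accurate: the compute log only sees the failed binding result, never the reason a Neutron mechanism driver refused the port.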
[ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Traceback (most recent call last): [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] yield resources [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.driver.spawn(context, instance, image_meta, [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] vm_ref = self.build_virtual_machine(instance, [ 740.465804] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] for vif in network_info: [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return self._sync_wrapper(fn, *args, **kwargs) [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.wait() [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self[:] = self._gt.wait() [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return self._exit_event.wait() [ 740.466282] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 740.466282] env[61905]: ERROR 
nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] current.throw(*self._exc) [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] result = function(*args, **kwargs) [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return func(*args, **kwargs) [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise e [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] nwinfo = self.network_api.allocate_for_instance( [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] created_port_ids = self._update_ports_for_instance( [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] with excutils.save_and_reraise_exception(): [ 740.466685] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.force_reraise() [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise self.value [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] updated_port = self._update_port( [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] _ensure_no_port_binding_failure(port) [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise exception.PortBindingFailed(port_id=port['id']) [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. [ 740.467105] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] [ 740.467105] env[61905]: INFO nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Terminating instance [ 740.468069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquiring lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.567614] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.601630] env[61905]: DEBUG nova.network.neutron [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.680242] env[61905]: DEBUG nova.network.neutron [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.824050] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.902775] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.185941] env[61905]: DEBUG oslo_concurrency.lockutils [req-a2098188-85d2-4d08-bb90-d22ade504788 req-a20f96fd-45c3-4c76-a95b-c41afa3b2928 service nova] Releasing lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.186371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquired lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.186549] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.408328] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Releasing lock "refresh_cache-ef6e5c2c-1778-4079-ae35-55f9264a060d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.408634] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 741.408769] env[61905]: DEBUG nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 741.408934] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.430877] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.708250] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.817913] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.835399] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ef6e5c2c-1778-4079-ae35-55f9264a060d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 741.839877] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 8d2cb485-32da-4fe7-8462-d98c071a6310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.839877] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 6783654c-4f87-4353-b9ba-1299158eba3a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.839877] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 24f832e6-9704-4105-a17f-c5c77fa52d74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.839877] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 5bdd7f80-e321-475f-8132-7047a7f24c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.840469] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d4e5eb34-6f16-4920-9f95-7ea8b080084b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 741.933364] env[61905]: DEBUG nova.network.neutron [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.105533] env[61905]: DEBUG nova.compute.manager [req-81d170d7-ac08-4d8a-87d3-4fde8bf57710 req-b1ef815a-89ff-447c-9281-6010f74e31eb service nova] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Received event network-vif-deleted-38cfc58f-147f-4c07-b58b-0a1924ced7ca {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.321650] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Releasing lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.322059] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 742.322262] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.322562] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7ad4e91-7251-4a8c-8ae0-7f7165436cce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.331976] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07640a1b-6f8d-43c6-bfd4-cd121d1793d7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.342758] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 090f2b46-e4f1-4b6b-b596-dd1937969007 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 742.356553] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4e5eb34-6f16-4920-9f95-7ea8b080084b could not be found. 
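[Editor's note] Every traceback in this section passes through oslo_utils.excutils (__exit__ at excutils.py:227, force_reraise at excutils.py:200), which is why PortBindingFailed appears twice per traceback: once where it is originally raised and once from "raise self.value". A minimal, self-contained sketch of the save_and_reraise_exception pattern; update_port and run_cleanup are hypothetical stand-ins for the code in _update_ports_for_instance.

    # Cleanup-then-reraise pattern visible in the tracebacks above.
    from oslo_utils import excutils

    def update_ports(update_port, run_cleanup):
        try:
            return update_port()
        except Exception:
            with excutils.save_and_reraise_exception():
                # Runs while the original exception is saved; when the
                # with-block exits, force_reraise() re-raises it unchanged.
                run_cleanup()

Raising a new exception inside the with-block would propagate instead of the saved one, so cleanup code in this position is typically kept best-effort.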
[ 742.356776] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.356963] env[61905]: INFO nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 742.357311] env[61905]: DEBUG oslo.service.loopingcall [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.358140] env[61905]: DEBUG nova.compute.manager [-] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.358259] env[61905]: DEBUG nova.network.neutron [-] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.379068] env[61905]: DEBUG nova.network.neutron [-] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.439487] env[61905]: INFO nova.compute.manager [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] [instance: ef6e5c2c-1778-4079-ae35-55f9264a060d] Took 1.03 seconds to deallocate network for instance. [ 742.848503] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 742.881850] env[61905]: DEBUG nova.network.neutron [-] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.352084] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1b57ca4b-a8b9-497f-bc81-71c31510093e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.383823] env[61905]: INFO nova.compute.manager [-] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Took 1.03 seconds to deallocate network for instance.
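[Editor's note] The WARNING above ("Instance does not exist on backend") followed immediately by "Instance destroyed" shows the vmwareapi destroy path treating a VM that never materialized in vCenter as already destroyed, so teardown can continue to network deallocation. A hedged sketch of that control flow; destroy_vm and deallocate_network are hypothetical stand-ins, not the driver's real helpers.

    # Sketch: a missing backend VM is downgraded to a WARNING and teardown
    # continues, matching the vmops.py:1141-1143 records above.
    import logging

    from nova import exception

    LOG = logging.getLogger(__name__)

    def destroy(instance, destroy_vm, deallocate_network):
        try:
            destroy_vm(instance)
        except exception.InstanceNotFound:
            # Nothing to delete on the hypervisor; proceed anyway so ports,
            # allocations and quota usage are still cleaned up.
            LOG.warning('Instance does not exist on backend: %s', instance)
        LOG.debug('Instance destroyed')
        deallocate_network(instance)

This is why a build that failed before the VM was ever created (the PortBindingFailed case here) still produces a clean "Took 0.03 seconds to destroy the instance on the hypervisor" line.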
[ 743.386485] env[61905]: DEBUG nova.compute.claims [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 743.386485] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.470371] env[61905]: INFO nova.scheduler.client.report [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Deleted allocations for instance ef6e5c2c-1778-4079-ae35-55f9264a060d [ 743.855640] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 743.979055] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1819f71a-8a5d-4aa5-bbc3-baaafa5e9525 tempest-ServerActionsTestOtherB-1983127499 tempest-ServerActionsTestOtherB-1983127499-project-member] Lock "ef6e5c2c-1778-4079-ae35-55f9264a060d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 155.264s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.358807] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 744.483293] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 744.861859] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 79537eaa-5abf-477b-bce6-c079c9beb964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 745.014555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.364861] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 3ad9206a-5562-43a6-87a4-869f93b10933 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 745.868330] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0bed6b1c-237b-469d-9f9b-0c4c84550ffb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 746.371549] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 8ef98f37-9059-4658-9679-fb50dc812eb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 746.874585] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 60e68738-a333-44b2-a1e8-0b3da728059e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.377995] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a4a03b8a-3206-4684-9d85-0e60ac643175 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 747.880968] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d31570f0-7662-4e13-9dee-51dc66728acc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.384279] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance fed05097-de84-4617-bf9e-7fc116ebc56e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.887430] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ba3a1e36-a9f8-4482-908e-9c949c6f42ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.390681] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance aeb72a57-d319-479d-a1c7-3cebc6f73f09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 749.893504] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 74f94a46-63e4-44e0-9142-7e7d46cd31a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.396737] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 111d10e8-7e36-48b6-be45-2275c36fbee4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 750.899935] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 12c21d8e-1941-4481-9216-015ba6c09b9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.403486] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a9ac365e-2be1-438d-a514-6fa7b26fa10c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 751.908899] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e1a22f3e-4557-44d2-8e34-cc75f573fe41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.411962] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a6e45dd1-e0ee-4bda-9513-4b1000e15e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 752.412263] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 752.412412] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 752.699077] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d05bd0-4cfc-4b8d-a211-ff145a7ebe39 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.706743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9325f9ab-5f14-486c-bf00-7e5ce6efec7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.736103] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697c09f3-131d-4585-8778-bf3dcad933b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.745544] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe5077f-0318-4e35-b5be-fc1d59c2b4b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.764045] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.270056] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.774706] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 753.774962] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.475s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.775598] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.114s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.570035] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63bc507-465c-4227-963c-36f7079fe856 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.577656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051dfcb8-67cc-4c70-b104-e757335b4a9a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.607010] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5d2b7d-72ce-4e5a-a8cd-54935b24d535 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.613498] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2319d78a-312f-4a7f-800e-d3b4eeed80c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.625887] env[61905]: DEBUG nova.compute.provider_tree [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.130590] env[61905]: DEBUG nova.scheduler.client.report [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 755.636209] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.636852] env[61905]: ERROR nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Traceback (most recent call last): [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.driver.spawn(context, instance, image_meta, [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self._vmops.spawn(context, instance, image_meta, injected_files, [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] vm_ref = self.build_virtual_machine(instance, [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] vif_infos = vmwarevif.get_vif_info(self._session, [ 755.636852] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] for vif in network_info: [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return self._sync_wrapper(fn, *args, **kwargs) [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.wait() [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self[:] = self._gt.wait() [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return self._exit_event.wait() [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] current.throw(*self._exc) [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 755.637259] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] result = function(*args, **kwargs) [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] return func(*args, **kwargs) [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise e [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] nwinfo = self.network_api.allocate_for_instance( [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] created_port_ids = self._update_ports_for_instance( [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] with excutils.save_and_reraise_exception(): [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] self.force_reraise() [ 755.637720] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise self.value [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 
1389, in _update_ports_for_instance [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] updated_port = self._update_port( [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] _ensure_no_port_binding_failure(port) [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] raise exception.PortBindingFailed(port_id=port['id']) [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] nova.exception.PortBindingFailed: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. [ 755.638127] env[61905]: ERROR nova.compute.manager [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] [ 755.638127] env[61905]: DEBUG nova.compute.utils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 755.638833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.366s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.640367] env[61905]: INFO nova.compute.claims [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.643687] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Build of instance 8d2cb485-32da-4fe7-8462-d98c071a6310 was re-scheduled: Binding failed for port 5455749f-be47-4556-add4-b7cf3cd4822a, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 755.644114] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 755.644338] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquiring lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.644484] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Acquired lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.644640] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.167020] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.256690] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.758778] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Releasing lock "refresh_cache-8d2cb485-32da-4fe7-8462-d98c071a6310" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.759072] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 756.759192] env[61905]: DEBUG nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 756.759357] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 756.782820] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.960572] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca62838-595b-4ee5-8678-90337555349e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.968157] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cb8e36-c802-4d59-918f-fa47ff836ac8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.998075] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4356ec6-af23-420f-abdd-9a2f05edb05e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.005808] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b9a715-4a4e-4bf7-95bc-dfe070b92091 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.019343] env[61905]: DEBUG nova.compute.provider_tree [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.286933] env[61905]: DEBUG nova.network.neutron [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.454646] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.454892] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task 
ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.524481] env[61905]: DEBUG nova.scheduler.client.report [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 757.789945] env[61905]: INFO nova.compute.manager [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] [instance: 8d2cb485-32da-4fe7-8462-d98c071a6310] Took 1.03 seconds to deallocate network for instance. [ 757.961024] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.961178] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.961332] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.029556] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.030118] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 758.032929] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.495s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.465823] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.538187] env[61905]: DEBUG nova.compute.utils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.543239] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 758.543239] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.587689] env[61905]: DEBUG nova.policy [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4eff81990ff47d78898ce92d8041996', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '225beb987e7647c4911cf7bb3c0d5a07', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 758.820260] env[61905]: INFO nova.scheduler.client.report [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Deleted allocations for instance 8d2cb485-32da-4fe7-8462-d98c071a6310 [ 758.898279] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85ca838-2fd0-4a25-a8a4-e6c2f67780cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.907576] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383777dc-37bb-469e-a3a7-12a54d936c38 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.937386] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 
tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Successfully created port: 52441f84-51c8-4aa1-a57d-51c826e780b7 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.939734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1fffdf-fb33-4f94-bb1f-2a683f8cc29d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.947133] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1263b3-baa6-4b25-a6b5-1b311e4f3b30 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.960083] env[61905]: DEBUG nova.compute.provider_tree [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.044448] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 759.329975] env[61905]: DEBUG oslo_concurrency.lockutils [None req-de5861b6-6863-485a-bd81-b931bce301b4 tempest-ServerMetadataNegativeTestJSON-2041524069 tempest-ServerMetadataNegativeTestJSON-2041524069-project-member] Lock "8d2cb485-32da-4fe7-8462-d98c071a6310" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 162.655s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.463053] env[61905]: DEBUG nova.scheduler.client.report [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.778877] env[61905]: DEBUG nova.compute.manager [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Received event network-changed-52441f84-51c8-4aa1-a57d-51c826e780b7 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.778877] env[61905]: DEBUG nova.compute.manager [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Refreshing instance network info cache due to event network-changed-52441f84-51c8-4aa1-a57d-51c826e780b7. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 759.778877] env[61905]: DEBUG oslo_concurrency.lockutils [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] Acquiring lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.778877] env[61905]: DEBUG oslo_concurrency.lockutils [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] Acquired lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.778877] env[61905]: DEBUG nova.network.neutron [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Refreshing network info cache for port 52441f84-51c8-4aa1-a57d-51c826e780b7 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.832592] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 759.957417] env[61905]: ERROR nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. 
[ 759.957417] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.957417] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 759.957417] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 759.957417] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.957417] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.957417] env[61905]: ERROR nova.compute.manager raise self.value [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 759.957417] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 759.957417] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.957417] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 759.957787] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.957787] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 759.957787] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. 
[ 759.957787] env[61905]: ERROR nova.compute.manager [ 759.957787] env[61905]: Traceback (most recent call last): [ 759.957787] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 759.957787] env[61905]: listener.cb(fileno) [ 759.957787] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 759.957787] env[61905]: result = function(*args, **kwargs) [ 759.957787] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 759.957787] env[61905]: return func(*args, **kwargs) [ 759.957787] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 759.957787] env[61905]: raise e [ 759.957787] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.957787] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 759.957787] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 759.957787] env[61905]: created_port_ids = self._update_ports_for_instance( [ 759.957787] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 759.957787] env[61905]: with excutils.save_and_reraise_exception(): [ 759.957787] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.957787] env[61905]: self.force_reraise() [ 759.957787] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.957787] env[61905]: raise self.value [ 759.957787] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 759.957787] env[61905]: updated_port = self._update_port( [ 759.957787] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.957787] env[61905]: _ensure_no_port_binding_failure(port) [ 759.957787] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.957787] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 759.958403] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. [ 759.958403] env[61905]: Removing descriptor: 17 [ 759.967552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.935s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.968107] env[61905]: ERROR nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. 
[ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Traceback (most recent call last): [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.driver.spawn(context, instance, image_meta, [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] vm_ref = self.build_virtual_machine(instance, [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.968107] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] for vif in network_info: [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return self._sync_wrapper(fn, *args, **kwargs) [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.wait() [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self[:] = self._gt.wait() [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return self._exit_event.wait() [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] current.throw(*self._exc) [ 759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
759.968401] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] result = function(*args, **kwargs) [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] return func(*args, **kwargs) [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise e [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] nwinfo = self.network_api.allocate_for_instance( [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] created_port_ids = self._update_ports_for_instance( [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] with excutils.save_and_reraise_exception(): [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] self.force_reraise() [ 759.968678] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise self.value [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] updated_port = self._update_port( [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] _ensure_no_port_binding_failure(port) [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] raise exception.PortBindingFailed(port_id=port['id']) [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] nova.exception.PortBindingFailed: Binding failed for 
port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. [ 759.968952] env[61905]: ERROR nova.compute.manager [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] [ 759.968952] env[61905]: DEBUG nova.compute.utils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 759.970805] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.212s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.973528] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Build of instance 6783654c-4f87-4353-b9ba-1299158eba3a was re-scheduled: Binding failed for port b91af5df-97a5-4bae-bd1d-5b742bc89c07, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 759.973963] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 759.974201] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquiring lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.974346] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Acquired lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.974501] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.057205] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 760.082787] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.082787] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.083080] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.083080] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.083193] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.083342] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.083779] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.083942] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.084123] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 
tempest-ServersTestJSON-1885687925-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.084287] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.084456] env[61905]: DEBUG nova.virt.hardware [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.085556] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de955b6-e9f9-4add-a83e-665b19bc1d14 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.094682] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bd5ea4-2c6c-4b43-af54-b1bbe0fba01a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.110703] env[61905]: ERROR nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. 
[ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Traceback (most recent call last): [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] yield resources [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.driver.spawn(context, instance, image_meta, [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self._vmops.spawn(context, instance, image_meta, injected_files, [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] vm_ref = self.build_virtual_machine(instance, [ 760.110703] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] vif_infos = vmwarevif.get_vif_info(self._session, [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] for vif in network_info: [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return self._sync_wrapper(fn, *args, **kwargs) [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.wait() [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self[:] = self._gt.wait() [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return self._exit_event.wait() [ 760.111174] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 760.111174] env[61905]: ERROR 
nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] current.throw(*self._exc) [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] result = function(*args, **kwargs) [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return func(*args, **kwargs) [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise e [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] nwinfo = self.network_api.allocate_for_instance( [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] created_port_ids = self._update_ports_for_instance( [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] with excutils.save_and_reraise_exception(): [ 760.111631] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.force_reraise() [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise self.value [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] updated_port = self._update_port( [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] _ensure_no_port_binding_failure(port) [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise exception.PortBindingFailed(port_id=port['id']) [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. [ 760.111889] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] [ 760.111889] env[61905]: INFO nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Terminating instance [ 760.113052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquiring lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.300646] env[61905]: DEBUG nova.network.neutron [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.361600] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.391406] env[61905]: DEBUG nova.network.neutron [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.497963] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.582341] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.794479] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b271ca-9db8-41ff-b792-574d08ce97b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.801839] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed5cdc2-f6be-4667-8085-38433ba7c0f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.833357] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a1b250-5cff-4bd1-9d34-e328968fa6b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.839499] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14533ef5-b3be-4e0e-880f-79c1543597f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.852839] env[61905]: DEBUG nova.compute.provider_tree [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.894514] env[61905]: DEBUG oslo_concurrency.lockutils [req-36285bd6-1b68-41a2-8b28-b07bfbeebf4e req-d1143509-091b-40c9-8fe9-8cf9f880d6f3 service nova] Releasing lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.894571] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquired lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.894756] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.085168] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Releasing lock "refresh_cache-6783654c-4f87-4353-b9ba-1299158eba3a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.085517] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 
tempest-ServerActionsV293TestJSON-1171886185-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 761.085751] env[61905]: DEBUG nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 761.085979] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.100901] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.355856] env[61905]: DEBUG nova.scheduler.client.report [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.423495] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.557587] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.603462] env[61905]: DEBUG nova.network.neutron [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.836069] env[61905]: DEBUG nova.compute.manager [req-9b3f4370-f2e3-4acc-b7d9-7738bd84d56d req-94bc83ea-6b0e-426c-8a27-2f14acf130e1 service nova] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Received event network-vif-deleted-52441f84-51c8-4aa1-a57d-51c826e780b7 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.861070] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.890s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.861070] env[61905]: ERROR nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. 
[ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Traceback (most recent call last): [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.driver.spawn(context, instance, image_meta, [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.861070] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] vm_ref = self.build_virtual_machine(instance, [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] for vif in network_info: [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return self._sync_wrapper(fn, *args, **kwargs) [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.wait() [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self[:] = self._gt.wait() [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return self._exit_event.wait() [ 761.861339] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] current.throw(*self._exc) [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] result = function(*args, **kwargs) [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] return func(*args, **kwargs) [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise e [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] nwinfo = self.network_api.allocate_for_instance( [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] created_port_ids = self._update_ports_for_instance( [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 761.861684] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] with excutils.save_and_reraise_exception(): [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] self.force_reraise() [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise self.value [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] updated_port = self._update_port( [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] _ensure_no_port_binding_failure(port) [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] raise exception.PortBindingFailed(port_id=port['id']) [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] nova.exception.PortBindingFailed: Binding failed for 
port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. [ 761.861931] env[61905]: ERROR nova.compute.manager [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] [ 761.862179] env[61905]: DEBUG nova.compute.utils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 761.863111] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.620s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.866281] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Build of instance 24f832e6-9704-4105-a17f-c5c77fa52d74 was re-scheduled: Binding failed for port 8a7a926c-a420-4fb7-9add-5294d3e29711, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 761.866772] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 761.867096] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquiring lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.867285] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Acquired lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.867546] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.060518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Releasing lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.061033] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b 
tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 762.061268] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.061613] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c24e8b2e-b380-4844-888d-534b7372d8e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.070767] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06248eb8-7752-488c-9ba7-bb9ef452bd42 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.091416] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 090f2b46-e4f1-4b6b-b596-dd1937969007 could not be found. [ 762.091621] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.091800] env[61905]: INFO nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Took 0.03 seconds to destroy the instance on the hypervisor. [ 762.092115] env[61905]: DEBUG oslo.service.loopingcall [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.092256] env[61905]: DEBUG nova.compute.manager [-] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.092346] env[61905]: DEBUG nova.network.neutron [-] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 762.106013] env[61905]: INFO nova.compute.manager [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] [instance: 6783654c-4f87-4353-b9ba-1299158eba3a] Took 1.02 seconds to deallocate network for instance. [ 762.109394] env[61905]: DEBUG nova.network.neutron [-] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.387857] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.484142] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.615986] env[61905]: DEBUG nova.network.neutron [-] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.669335] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6011ff18-b20e-480e-90bb-b8c99bd8b04f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.678023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f1e5ef-1a5f-4742-8468-d346ee0c9054 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.709875] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbcbb78-ed31-4ef1-87fb-9089df38e573 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.717718] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f192d35-e35f-4e76-a601-61e705d9b5e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.732314] env[61905]: DEBUG nova.compute.provider_tree [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.987028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Releasing lock "refresh_cache-24f832e6-9704-4105-a17f-c5c77fa52d74" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.987028] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 762.987028] env[61905]: DEBUG nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.987028] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.009249] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.121784] env[61905]: INFO nova.compute.manager [-] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Took 1.03 seconds to deallocate network for instance. [ 763.127625] env[61905]: DEBUG nova.compute.claims [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 763.127806] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.146357] env[61905]: INFO nova.scheduler.client.report [None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Deleted allocations for instance 6783654c-4f87-4353-b9ba-1299158eba3a [ 763.235854] env[61905]: DEBUG nova.scheduler.client.report [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.511754] env[61905]: DEBUG nova.network.neutron [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.654370] env[61905]: DEBUG oslo_concurrency.lockutils 
[None req-4e9c7d50-b0c9-404a-a91c-a4cc8a3a794a tempest-ServerActionsV293TestJSON-1171886185 tempest-ServerActionsV293TestJSON-1171886185-project-member] Lock "6783654c-4f87-4353-b9ba-1299158eba3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 165.150s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.740609] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.877s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.741249] env[61905]: ERROR nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Traceback (most recent call last): [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.driver.spawn(context, instance, image_meta, [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] vm_ref = self.build_virtual_machine(instance, [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.741249] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] for vif in network_info: [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return self._sync_wrapper(fn, *args, **kwargs) [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 603, 
in _sync_wrapper [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.wait() [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self[:] = self._gt.wait() [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return self._exit_event.wait() [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] current.throw(*self._exc) [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 763.741542] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] result = function(*args, **kwargs) [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] return func(*args, **kwargs) [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise e [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] nwinfo = self.network_api.allocate_for_instance( [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] created_port_ids = self._update_ports_for_instance( [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] with excutils.save_and_reraise_exception(): [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] self.force_reraise() [ 763.741888] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise self.value [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] updated_port = self._update_port( [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] _ensure_no_port_binding_failure(port) [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] raise exception.PortBindingFailed(port_id=port['id']) [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] nova.exception.PortBindingFailed: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. [ 763.742223] env[61905]: ERROR nova.compute.manager [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] [ 763.742223] env[61905]: DEBUG nova.compute.utils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 763.743155] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.023s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.744969] env[61905]: INFO nova.compute.claims [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.747890] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Build of instance 5bdd7f80-e321-475f-8132-7047a7f24c75 was re-scheduled: Binding failed for port 6cbb8fa6-d3ef-4503-8d2d-d69a538b40cf, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 763.748344] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 763.748592] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.748744] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.748902] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.014947] env[61905]: INFO nova.compute.manager [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] [instance: 24f832e6-9704-4105-a17f-c5c77fa52d74] Took 1.03 seconds to deallocate network for instance. [ 764.157509] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 764.271238] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.351240] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.683471] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.857692] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "refresh_cache-5bdd7f80-e321-475f-8132-7047a7f24c75" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.857692] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 764.857692] env[61905]: DEBUG nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 764.857692] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.877219] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.045007] env[61905]: INFO nova.scheduler.client.report [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Deleted allocations for instance 24f832e6-9704-4105-a17f-c5c77fa52d74 [ 765.234201] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b5934f-d2d7-44b3-8798-b6f4094ee5fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.242103] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101ffade-2084-4acc-8517-12b0f0fa2531 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.280951] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94302f0-4bf9-4c1a-9aca-fe8aaafa54a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.288367] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e65669-e533-49ad-898f-e5c2aba114aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.303295] env[61905]: DEBUG nova.compute.provider_tree [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.380720] env[61905]: DEBUG nova.network.neutron [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.557829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-881c7735-628b-4db2-bed5-b104f982f390 tempest-ImagesNegativeTestJSON-1484380717 tempest-ImagesNegativeTestJSON-1484380717-project-member] Lock "24f832e6-9704-4105-a17f-c5c77fa52d74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 148.090s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.806509] env[61905]: DEBUG nova.scheduler.client.report [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.884532] env[61905]: INFO nova.compute.manager [None req-151240a5-0522-43c4-8476-2045d3013b93 
tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 5bdd7f80-e321-475f-8132-7047a7f24c75] Took 1.03 seconds to deallocate network for instance. [ 766.061901] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 766.313977] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.571s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.314521] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 766.319878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.559s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.322543] env[61905]: INFO nova.compute.claims [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.590661] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.828087] env[61905]: DEBUG nova.compute.utils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 766.832490] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 766.832714] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.879352] env[61905]: DEBUG nova.policy [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 766.918453] env[61905]: INFO nova.scheduler.client.report [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Deleted allocations for instance 5bdd7f80-e321-475f-8132-7047a7f24c75 [ 767.206655] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Successfully created port: b81d5da7-8d7e-4da1-83af-badb397ecd37 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.332446] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 767.436759] env[61905]: DEBUG oslo_concurrency.lockutils [None req-151240a5-0522-43c4-8476-2045d3013b93 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "5bdd7f80-e321-475f-8132-7047a7f24c75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.993s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.702899] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e29fdc-940b-4dab-8d3a-4131e4810c0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.711131] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bc8e08-ef3d-4c2f-ac9c-960271b7a511 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.747860] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2ca1dc-ef38-49a9-8f19-cd2fa79387ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.755949] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd400d9-f546-443b-8386-864ded4448f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.770908] env[61905]: DEBUG nova.compute.provider_tree [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.947011] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 768.259626] env[61905]: DEBUG nova.compute.manager [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Received event network-changed-b81d5da7-8d7e-4da1-83af-badb397ecd37 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 768.259883] env[61905]: DEBUG nova.compute.manager [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Refreshing instance network info cache due to event network-changed-b81d5da7-8d7e-4da1-83af-badb397ecd37. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 768.260122] env[61905]: DEBUG oslo_concurrency.lockutils [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] Acquiring lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.260349] env[61905]: DEBUG oslo_concurrency.lockutils [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] Acquired lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.260572] env[61905]: DEBUG nova.network.neutron [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Refreshing network info cache for port b81d5da7-8d7e-4da1-83af-badb397ecd37 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 768.279021] env[61905]: DEBUG nova.scheduler.client.report [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.347477] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 768.377945] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.378197] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.378350] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.378527] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.378667] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.378817] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.379012] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.379168] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.379329] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.379486] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.379654] env[61905]: DEBUG nova.virt.hardware [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.380814] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f197e9b-97cb-48f5-bd80-db83b10bce52 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.389448] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e0847f-8966-463f-815e-957f3f1c17a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.476263] env[61905]: ERROR nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 768.476263] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 768.476263] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.476263] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.476263] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.476263] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.476263] env[61905]: ERROR nova.compute.manager raise self.value [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.476263] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 768.476263] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.476263] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 768.476684] env[61905]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.476684] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 768.476684] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 768.476684] env[61905]: ERROR nova.compute.manager [ 768.476684] env[61905]: Traceback (most recent call last): [ 768.476684] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 768.476684] env[61905]: listener.cb(fileno) [ 768.476684] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.476684] env[61905]: result = function(*args, **kwargs) [ 768.476684] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.476684] env[61905]: return func(*args, **kwargs) [ 768.476684] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 768.476684] env[61905]: raise e [ 768.476684] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 768.476684] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 768.476684] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.476684] env[61905]: created_port_ids = self._update_ports_for_instance( [ 768.476684] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.476684] env[61905]: with excutils.save_and_reraise_exception(): [ 768.476684] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.476684] env[61905]: self.force_reraise() [ 768.476684] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.476684] env[61905]: raise self.value [ 768.476684] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.476684] env[61905]: updated_port = self._update_port( [ 768.476684] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.476684] env[61905]: _ensure_no_port_binding_failure(port) [ 768.476684] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.476684] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 768.477425] env[61905]: nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 768.477425] env[61905]: Removing descriptor: 17 [ 768.477425] env[61905]: ERROR nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. 
[ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Traceback (most recent call last): [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] yield resources [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.driver.spawn(context, instance, image_meta, [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 768.477425] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] vm_ref = self.build_virtual_machine(instance, [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] for vif in network_info: [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self._sync_wrapper(fn, *args, **kwargs) [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.wait() [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self[:] = self._gt.wait() [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self._exit_event.wait() [ 768.477755] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 768.478151] env[61905]: ERROR 
nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] result = hub.switch() [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self.greenlet.switch() [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] result = function(*args, **kwargs) [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return func(*args, **kwargs) [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise e [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] nwinfo = self.network_api.allocate_for_instance( [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 768.478151] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] created_port_ids = self._update_ports_for_instance( [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] with excutils.save_and_reraise_exception(): [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.force_reraise() [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise self.value [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] updated_port = self._update_port( [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.478490] 
env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] _ensure_no_port_binding_failure(port) [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.478490] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise exception.PortBindingFailed(port_id=port['id']) [ 768.478792] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 768.478792] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] [ 768.478792] env[61905]: INFO nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Terminating instance [ 768.481460] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.600442] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.783294] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.783820] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.787076] env[61905]: DEBUG nova.network.neutron [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.788850] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.221s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.790247] env[61905]: INFO nova.compute.claims [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.906415] env[61905]: DEBUG nova.network.neutron [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.295280] env[61905]: DEBUG nova.compute.utils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.301980] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 769.302167] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.374794] env[61905]: DEBUG nova.policy [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9bd70488575b4578b2866fa3f25537e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a4772174fb44f8691551e688aebed1e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 769.409606] env[61905]: DEBUG oslo_concurrency.lockutils [req-3005f473-9049-4daa-a823-09cda022cc86 req-834de44c-ce74-4130-9d2d-24ab6a71298a service nova] Releasing lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.410054] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.410242] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.779898] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Successfully created port: 231af9a2-1f63-4de2-877b-d007606490f4 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.802657] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.935648] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.070142] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.113452] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92be9be3-d51e-45c5-9329-69c37495cd81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.121530] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d3e216-d5fa-4608-8ce8-acc2e2fb3d6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.153906] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10a3109-c818-4c06-83a3-cc79f34748db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.162637] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fa5fb7-8ee1-4055-b314-202ae6c8294e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.176376] env[61905]: DEBUG nova.compute.provider_tree [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.294125] env[61905]: DEBUG nova.compute.manager [req-dd98c837-989f-4d9f-9ffb-b7798fea0f5d req-7cc0e71d-9ac0-40e4-90c5-f049ceebfde3 service nova] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Received event network-vif-deleted-b81d5da7-8d7e-4da1-83af-badb397ecd37 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.464916] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.468251] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.572816] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.573448] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 770.573647] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 770.573957] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcb0c5e5-5788-44e6-a930-22fd42879f87 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.592973] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37c8511-f52c-40d3-a456-d130f7fa4a38 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.614742] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac could not be found. [ 770.614973] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.615168] env[61905]: INFO nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Took 0.04 seconds to destroy the instance on the hypervisor. [ 770.615425] env[61905]: DEBUG oslo.service.loopingcall [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.615680] env[61905]: DEBUG nova.compute.manager [-] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 770.615773] env[61905]: DEBUG nova.network.neutron [-] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.631533] env[61905]: DEBUG nova.network.neutron [-] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.679978] env[61905]: DEBUG nova.scheduler.client.report [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.824332] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.843622] env[61905]: ERROR nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. [ 770.843622] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.843622] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.843622] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.843622] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.843622] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.843622] env[61905]: ERROR nova.compute.manager raise self.value [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.843622] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 770.843622] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.843622] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 770.844020] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.844020] env[61905]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 770.844020] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. [ 770.844020] env[61905]: ERROR nova.compute.manager [ 770.844020] env[61905]: Traceback (most recent call last): [ 770.844020] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 770.844020] env[61905]: listener.cb(fileno) [ 770.844020] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.844020] env[61905]: result = function(*args, **kwargs) [ 770.844020] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.844020] env[61905]: return func(*args, **kwargs) [ 770.844020] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.844020] env[61905]: raise e [ 770.844020] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.844020] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 770.844020] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.844020] env[61905]: created_port_ids = self._update_ports_for_instance( [ 770.844020] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.844020] env[61905]: with excutils.save_and_reraise_exception(): [ 770.844020] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.844020] env[61905]: self.force_reraise() [ 770.844020] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.844020] env[61905]: raise self.value [ 770.844020] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.844020] env[61905]: updated_port = self._update_port( [ 770.844020] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.844020] env[61905]: _ensure_no_port_binding_failure(port) [ 770.844020] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.844020] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 770.844735] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. 
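Both port failures in this log (b81d5da7-8d7e-4da1-83af-badb397ecd37 and 231af9a2-1f63-4de2-877b-d007606490f4) are raised from the same spot, nova/network/neutron.py line 294 in _ensure_no_port_binding_failure. Neutron reports a failed binding by setting binding:vif_type on the port rather than by returning an error, so Nova has to check for it explicitly. A hedged reconstruction of that helper (only the raise and the message format are confirmed by these tracebacks; the exact field check may differ by release):

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # nova.network.model constant

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # A port whose binding failed still comes back from Neutron as a
        # normal dict; the failure is only visible in binding:vif_type.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])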
[ 770.844735] env[61905]: Removing descriptor: 17 [ 770.854388] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.854745] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.854963] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.855262] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.855538] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.855778] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.856050] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.856293] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 770.858945] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.858945] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.858945] env[61905]: DEBUG nova.virt.hardware [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.858945] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92dd29d-195b-4f91-94b5-187292405171 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.866031] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc10e3f-126f-480f-832e-8c53f408af00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.881443] env[61905]: ERROR nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. 
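The nova.virt.hardware records above show flavor m1.nano (vcpus=1, no hw: topology extra specs) yielding exactly one candidate, VirtCPUTopology(cores=1,sockets=1,threads=1): with a single vCPU and the default 65536-wide limits, sockets=cores=threads=1 is the only factorization. A conceptual sketch of that enumeration (not Nova's actual implementation of _get_possible_cpu_topologies):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) triple whose product is the
        # vCPU count and which respects the per-dimension limits.
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        for s, c, t in product(divisors, repeat=3):
            if (s * c * t == vcpus and s <= max_sockets
                    and c <= max_cores and t <= max_threads):
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log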
[ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Traceback (most recent call last): [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] yield resources [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.driver.spawn(context, instance, image_meta, [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] vm_ref = self.build_virtual_machine(instance, [ 770.881443] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] for vif in network_info: [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return self._sync_wrapper(fn, *args, **kwargs) [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.wait() [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self[:] = self._gt.wait() [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return self._exit_event.wait() [ 770.881813] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 770.881813] env[61905]: ERROR 
nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] current.throw(*self._exc) [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] result = function(*args, **kwargs) [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return func(*args, **kwargs) [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise e [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] nwinfo = self.network_api.allocate_for_instance( [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] created_port_ids = self._update_ports_for_instance( [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] with excutils.save_and_reraise_exception(): [ 770.882194] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.force_reraise() [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise self.value [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] updated_port = self._update_port( [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] _ensure_no_port_binding_failure(port) [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise exception.PortBindingFailed(port_id=port['id']) [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. [ 770.882519] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] [ 770.882519] env[61905]: INFO nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Terminating instance [ 770.885578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquiring lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.885748] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquired lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.885922] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.134469] env[61905]: DEBUG nova.network.neutron [-] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.184523] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.185036] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 771.187386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.801s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.406470] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.498149] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.636731] env[61905]: INFO nova.compute.manager [-] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Took 1.02 seconds to deallocate network for instance. [ 771.639586] env[61905]: DEBUG nova.compute.claims [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 771.639797] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.691231] env[61905]: DEBUG nova.compute.utils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 771.695943] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 771.696130] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.747161] env[61905]: DEBUG nova.policy [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cb738ff22d24733a94b170e8b25b3d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b04aea738a14a8386e9133be2939157', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 771.958241] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f68fd62-2fef-4812-b591-6de39d2e52e7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.967193] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdb3b4d-a8d4-4d5a-a1ad-1959fc113942 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.996617] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cf44eb-3aa8-4f2a-8be1-b9b5db68d68b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.000847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Releasing lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.001243] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 772.001423] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 772.003891] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc92eec9-5786-4a1a-9a66-2a8101d64f4d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.006436] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e270f735-6e88-48ab-8bdd-d28b1d21bb62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.021489] env[61905]: DEBUG nova.compute.provider_tree [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.025437] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71032f92-ea15-4ad4-90c6-0d5c0e5f8e85 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.036471] env[61905]: DEBUG nova.scheduler.client.report [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.050202] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1b57ca4b-a8b9-497f-bc81-71c31510093e could not be found. [ 772.050403] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.050605] env[61905]: INFO nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Took 0.05 seconds to destroy the instance on the hypervisor. 
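The inventory dict logged for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 determines how much capacity placement schedules against; the usual formula is (total - reserved) * allocation_ratio per resource class (an assumption about placement's accounting, not something these log lines state). Applied to the logged values:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400

Note that max_unit (16 VCPU, 65530 MB, 149 GB in the logged data) caps any single allocation, so it, not raw capacity, is what would reject one very large flavor here.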
[ 772.050794] env[61905]: DEBUG oslo.service.loopingcall [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.051456] env[61905]: DEBUG nova.compute.manager [-] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 772.051554] env[61905]: DEBUG nova.network.neutron [-] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.193826] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 772.205979] env[61905]: DEBUG nova.network.neutron [-] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.245292] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Successfully created port: 4fe9bda5-26c4-47c0-b3b8-87292950204d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.322393] env[61905]: DEBUG nova.compute.manager [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Received event network-changed-231af9a2-1f63-4de2-877b-d007606490f4 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 772.322590] env[61905]: DEBUG nova.compute.manager [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Refreshing instance network info cache due to event network-changed-231af9a2-1f63-4de2-877b-d007606490f4. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 772.322800] env[61905]: DEBUG oslo_concurrency.lockutils [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] Acquiring lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.322939] env[61905]: DEBUG oslo_concurrency.lockutils [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] Acquired lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.323105] env[61905]: DEBUG nova.network.neutron [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Refreshing network info cache for port 231af9a2-1f63-4de2-877b-d007606490f4 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.543133] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.353s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.543133] env[61905]: ERROR nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. 
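The "Acquiring lock / acquired / released ... held N.NNNs" records throughout this section (for example "compute_resources" held 1.353s just above) come from oslo.concurrency's named-lock helpers wrapping the resource tracker and the network-info-cache refresh paths. A minimal sketch of the two forms visible in the log, with an illustrative lock name and body:

    from oslo_concurrency import lockutils

    # Decorator form, as on the resource tracker's claim/abort methods;
    # wait time and held time are logged by the "inner" wrapper.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        ...  # runs with the named lock held

    # Context-manager form, as around the instance cache refresh:
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        ...  # rebuild instance_info_cache under the lock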
[ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Traceback (most recent call last): [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.driver.spawn(context, instance, image_meta, [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 772.543133] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] vm_ref = self.build_virtual_machine(instance, [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] vif_infos = vmwarevif.get_vif_info(self._session, [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] for vif in network_info: [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return self._sync_wrapper(fn, *args, **kwargs) [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.wait() [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self[:] = self._gt.wait() [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return self._exit_event.wait() [ 772.543563] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] current.throw(*self._exc) [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] result = function(*args, **kwargs) [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] return func(*args, **kwargs) [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise e [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] nwinfo = self.network_api.allocate_for_instance( [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] created_port_ids = self._update_ports_for_instance( [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 772.543876] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] with excutils.save_and_reraise_exception(): [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] self.force_reraise() [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise self.value [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] updated_port = self._update_port( [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] _ensure_no_port_binding_failure(port) [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] raise exception.PortBindingFailed(port_id=port['id']) [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] nova.exception.PortBindingFailed: Binding failed for 
port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. [ 772.544192] env[61905]: ERROR nova.compute.manager [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] [ 772.544475] env[61905]: DEBUG nova.compute.utils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 772.548813] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.531s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.548813] env[61905]: INFO nova.compute.claims [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.551280] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Build of instance d4e5eb34-6f16-4920-9f95-7ea8b080084b was re-scheduled: Binding failed for port 38cfc58f-147f-4c07-b58b-0a1924ced7ca, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 772.552174] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 772.552543] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquiring lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.552836] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Acquired lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.553156] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.708815] env[61905]: DEBUG nova.network.neutron [-] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.849337] env[61905]: DEBUG nova.network.neutron [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.973880] env[61905]: DEBUG nova.network.neutron [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.078845] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.204949] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.206048] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 773.210032] env[61905]: INFO nova.compute.manager [-] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Took 1.16 seconds to deallocate network for instance. [ 773.212226] env[61905]: DEBUG nova.compute.claims [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 773.212445] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.233063] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 773.233350] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 773.233528] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
773.233779] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 773.233936] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.234110] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 773.234318] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 773.234491] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 773.234670] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 773.234830] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 773.234998] env[61905]: DEBUG nova.virt.hardware [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 773.236113] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16508b70-9e49-43ae-a2fd-b6079a913e82 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.244346] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2808c833-0495-4084-b6da-1b392c36d7db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.293752] env[61905]: ERROR nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 
tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. [ 773.293752] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.293752] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 773.293752] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 773.293752] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.293752] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.293752] env[61905]: ERROR nova.compute.manager raise self.value [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 773.293752] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 773.293752] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.293752] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 773.294118] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.294118] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 773.294118] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. 
[ 773.294118] env[61905]: ERROR nova.compute.manager [ 773.294118] env[61905]: Traceback (most recent call last): [ 773.294118] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 773.294118] env[61905]: listener.cb(fileno) [ 773.294118] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 773.294118] env[61905]: result = function(*args, **kwargs) [ 773.294118] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 773.294118] env[61905]: return func(*args, **kwargs) [ 773.294118] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.294118] env[61905]: raise e [ 773.294118] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.294118] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 773.294118] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 773.294118] env[61905]: created_port_ids = self._update_ports_for_instance( [ 773.294118] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 773.294118] env[61905]: with excutils.save_and_reraise_exception(): [ 773.294118] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.294118] env[61905]: self.force_reraise() [ 773.294118] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.294118] env[61905]: raise self.value [ 773.294118] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 773.294118] env[61905]: updated_port = self._update_port( [ 773.294118] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.294118] env[61905]: _ensure_no_port_binding_failure(port) [ 773.294118] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.294118] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 773.294802] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. [ 773.294802] env[61905]: Removing descriptor: 17 [ 773.294802] env[61905]: ERROR nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. 
[ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Traceback (most recent call last): [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] yield resources [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.driver.spawn(context, instance, image_meta, [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 773.294802] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] vm_ref = self.build_virtual_machine(instance, [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] vif_infos = vmwarevif.get_vif_info(self._session, [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] for vif in network_info: [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self._sync_wrapper(fn, *args, **kwargs) [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.wait() [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self[:] = self._gt.wait() [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self._exit_event.wait() [ 773.295064] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 773.295330] env[61905]: ERROR 
nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] result = hub.switch() [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self.greenlet.switch() [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] result = function(*args, **kwargs) [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return func(*args, **kwargs) [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise e [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] nwinfo = self.network_api.allocate_for_instance( [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 773.295330] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] created_port_ids = self._update_ports_for_instance( [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] with excutils.save_and_reraise_exception(): [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.force_reraise() [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise self.value [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] updated_port = self._update_port( [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.295659] 
env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] _ensure_no_port_binding_failure(port) [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.295659] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise exception.PortBindingFailed(port_id=port['id']) [ 773.295961] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. [ 773.295961] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] [ 773.295961] env[61905]: INFO nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Terminating instance [ 773.297018] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquiring lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.297143] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquired lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.297315] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.475620] env[61905]: DEBUG oslo_concurrency.lockutils [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] Releasing lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.475918] env[61905]: DEBUG nova.compute.manager [req-e95dcccf-ac92-4060-a53b-de17b81d902a req-934c2f91-c564-48e8-bc2b-94978e03438e service nova] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Received event network-vif-deleted-231af9a2-1f63-4de2-877b-d007606490f4 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.709246] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Releasing lock "refresh_cache-d4e5eb34-6f16-4920-9f95-7ea8b080084b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.709524] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 
tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 773.709649] env[61905]: DEBUG nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.709843] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 773.723991] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.805077] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578d5c26-765c-4222-9411-95732f1f08fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.812600] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a765cea0-ac73-4222-8f20-39d3b8a24518 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.842797] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.844959] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91e4526-279d-490b-8c76-fa3d3635bb8e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.851995] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1db8b92-6e9d-4fdc-ad06-b54306ecdad7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.865146] env[61905]: DEBUG nova.compute.provider_tree [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.905098] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.228497] env[61905]: DEBUG nova.network.neutron [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.350376] env[61905]: DEBUG nova.compute.manager [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Received event network-changed-4fe9bda5-26c4-47c0-b3b8-87292950204d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 774.350568] env[61905]: DEBUG nova.compute.manager [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Refreshing instance network info cache due to event network-changed-4fe9bda5-26c4-47c0-b3b8-87292950204d. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 774.350755] env[61905]: DEBUG oslo_concurrency.lockutils [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] Acquiring lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.369144] env[61905]: DEBUG nova.scheduler.client.report [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 774.408111] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Releasing lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.408515] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 774.408703] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.408990] env[61905]: DEBUG oslo_concurrency.lockutils [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] Acquired lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.409170] env[61905]: DEBUG nova.network.neutron [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Refreshing network info cache for port 4fe9bda5-26c4-47c0-b3b8-87292950204d {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.410122] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-015f3bfa-4609-4c15-be6c-f58c9c6624fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.420058] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d07f30-8452-4014-b441-74c8bba85ab7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.441808] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8 could not be found. [ 774.442022] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.442448] env[61905]: INFO nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 774.442448] env[61905]: DEBUG oslo.service.loopingcall [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.442601] env[61905]: DEBUG nova.compute.manager [-] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 774.442694] env[61905]: DEBUG nova.network.neutron [-] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 774.457481] env[61905]: DEBUG nova.network.neutron [-] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.730965] env[61905]: INFO nova.compute.manager [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] [instance: d4e5eb34-6f16-4920-9f95-7ea8b080084b] Took 1.02 seconds to deallocate network for instance. [ 774.873512] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.328s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.873986] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 774.876535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.411s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.877159] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.877321] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 774.877643] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.516s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.879098] env[61905]: INFO nova.compute.claims [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.882961] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191c5dd4-3ae1-4f1d-940e-679a5c0ac71f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.892286] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b226c32b-9937-4b25-96b3-2466da9ca235 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.906793] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b2e444-b3e2-42f4-9d71-ea7e175401c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.913482] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44089cf2-756b-4c2b-8cb8-8f844aec01a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.945158] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181463MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 774.945319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.946268] env[61905]: DEBUG nova.network.neutron [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.959740] env[61905]: DEBUG nova.network.neutron [-] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.012666] env[61905]: DEBUG nova.network.neutron [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.387542] env[61905]: DEBUG nova.compute.utils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.389421] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 775.389627] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.427027] env[61905]: DEBUG nova.policy [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '495e4ddb99b44d1f862021a17e219716', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a3f15509b44f4e5b904aa4cd896651cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 775.461907] env[61905]: INFO nova.compute.manager [-] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Took 1.02 seconds to deallocate network for instance. 
[ 775.464059] env[61905]: DEBUG nova.compute.claims [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 775.464235] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.517730] env[61905]: DEBUG oslo_concurrency.lockutils [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] Releasing lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.517730] env[61905]: DEBUG nova.compute.manager [req-191507b0-4433-48a9-81ca-f8554bc98940 req-bcf6d160-ba5e-4637-8126-94a0d5ab8a2d service nova] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Received event network-vif-deleted-4fe9bda5-26c4-47c0-b3b8-87292950204d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.695182] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Successfully created port: 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.764576] env[61905]: INFO nova.scheduler.client.report [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Deleted allocations for instance d4e5eb34-6f16-4920-9f95-7ea8b080084b [ 775.890129] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 776.166190] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84a3454-288a-4f30-ac17-8b84cf3e8706 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.172647] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3299a66a-e944-4438-92a4-8147390dd76b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.204857] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff5e52f-8e73-4fb8-83af-ea7b5a5b3d1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.212139] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe5fee6-cb0f-49a1-b677-f9b07d11adf5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.229769] env[61905]: DEBUG nova.compute.provider_tree [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.276751] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8c4acae7-05c3-4c5f-b577-d8963cf07deb tempest-FloatingIPsAssociationTestJSON-1908461081 tempest-FloatingIPsAssociationTestJSON-1908461081-project-member] Lock "d4e5eb34-6f16-4920-9f95-7ea8b080084b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.232s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.661967] env[61905]: DEBUG nova.compute.manager [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Received event network-changed-8fe0ee29-a1ac-4179-8359-d2b2408f3a2e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 776.661967] env[61905]: DEBUG nova.compute.manager [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Refreshing instance network info cache due to event network-changed-8fe0ee29-a1ac-4179-8359-d2b2408f3a2e. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 776.661967] env[61905]: DEBUG oslo_concurrency.lockutils [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] Acquiring lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.661967] env[61905]: DEBUG oslo_concurrency.lockutils [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] Acquired lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.661967] env[61905]: DEBUG nova.network.neutron [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Refreshing network info cache for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.732943] env[61905]: DEBUG nova.scheduler.client.report [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 776.779790] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 776.905817] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 776.950023] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 776.950023] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 776.950023] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.950327] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 776.950327] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.950327] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 776.950327] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 776.950327] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 776.950499] env[61905]: DEBUG nova.virt.hardware [None 
req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 776.950499] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 776.950499] env[61905]: DEBUG nova.virt.hardware [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 776.950499] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d0ac03-f2ce-4b95-9aa5-ef38408f9538 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.953914] env[61905]: ERROR nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. [ 776.953914] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.953914] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.953914] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.953914] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.953914] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.953914] env[61905]: ERROR nova.compute.manager raise self.value [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.953914] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 776.953914] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.953914] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 776.954442] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.954442] env[61905]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 776.954442] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. [ 776.954442] env[61905]: ERROR nova.compute.manager [ 776.954442] env[61905]: Traceback (most recent call last): [ 776.954442] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 776.954442] env[61905]: listener.cb(fileno) [ 776.954442] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.954442] env[61905]: result = function(*args, **kwargs) [ 776.954442] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.954442] env[61905]: return func(*args, **kwargs) [ 776.954442] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.954442] env[61905]: raise e [ 776.954442] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.954442] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 776.954442] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.954442] env[61905]: created_port_ids = self._update_ports_for_instance( [ 776.954442] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.954442] env[61905]: with excutils.save_and_reraise_exception(): [ 776.954442] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.954442] env[61905]: self.force_reraise() [ 776.954442] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.954442] env[61905]: raise self.value [ 776.954442] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.954442] env[61905]: updated_port = self._update_port( [ 776.954442] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.954442] env[61905]: _ensure_no_port_binding_failure(port) [ 776.954442] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 776.954442] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 776.955916] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. [ 776.955916] env[61905]: Removing descriptor: 17 [ 776.960697] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128669ee-8230-4760-a723-d44dd1d95978 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.977417] env[61905]: ERROR nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. 
[ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Traceback (most recent call last): [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] yield resources [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.driver.spawn(context, instance, image_meta, [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] vm_ref = self.build_virtual_machine(instance, [ 776.977417] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] vif_infos = vmwarevif.get_vif_info(self._session, [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] for vif in network_info: [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return self._sync_wrapper(fn, *args, **kwargs) [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.wait() [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self[:] = self._gt.wait() [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return self._exit_event.wait() [ 776.977832] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 776.977832] env[61905]: ERROR 
nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] current.throw(*self._exc) [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] result = function(*args, **kwargs) [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return func(*args, **kwargs) [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise e [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] nwinfo = self.network_api.allocate_for_instance( [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] created_port_ids = self._update_ports_for_instance( [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] with excutils.save_and_reraise_exception(): [ 776.978203] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.force_reraise() [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise self.value [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] updated_port = self._update_port( [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] _ensure_no_port_binding_failure(port) [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise exception.PortBindingFailed(port_id=port['id']) [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. [ 776.978597] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] [ 776.978597] env[61905]: INFO nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Terminating instance [ 776.979328] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquiring lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.179877] env[61905]: DEBUG nova.network.neutron [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.237707] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.238320] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 777.241793] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.114s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.309045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.313992] env[61905]: DEBUG nova.network.neutron [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.746448] env[61905]: DEBUG nova.compute.utils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.747821] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 777.747987] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 777.816346] env[61905]: DEBUG oslo_concurrency.lockutils [req-23c36d53-186c-4383-8853-005369bafabd req-5d0e24f3-cdc2-4bf6-82bb-99cbb4820338 service nova] Releasing lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.818173] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquired lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.818173] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.823833] env[61905]: DEBUG nova.policy [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0fc3bde6f3748df8116a36b9f7260b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a78ffb1a94ca4220a39c68529eb5693d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 778.099055] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9d5429-6d20-4c05-a675-133d230b0091 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.106765] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec73bab9-e065-406c-be02-0e9826736b8f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.137782] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92893c9-86d9-4a2e-b3bf-5af5db9d80a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.146628] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7bc74f-8ee3-4afb-abf9-5511a54b91a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.160753] env[61905]: DEBUG nova.compute.provider_tree [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] 
Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.253876] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 778.342081] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.464342] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Successfully created port: 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.477812] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.667201] env[61905]: DEBUG nova.scheduler.client.report [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.708036] env[61905]: DEBUG nova.compute.manager [req-8cec46b6-f2e6-4226-8e04-db4ff4cc08dd req-04fe68b2-a4b0-4f03-9e92-695d23e90e66 service nova] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Received event network-vif-deleted-8fe0ee29-a1ac-4179-8359-d2b2408f3a2e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 778.980446] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Releasing lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.980849] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 778.981048] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 778.981334] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dc43fe6-a41c-4fc1-be10-10f8a034f45f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.990700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4003d178-82f0-4e47-8557-d18dc14d4bfe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.013766] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14 could not be found. [ 779.013984] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 779.014170] env[61905]: INFO nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Took 0.03 seconds to destroy the instance on the hypervisor. [ 779.014409] env[61905]: DEBUG oslo.service.loopingcall [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.014621] env[61905]: DEBUG nova.compute.manager [-] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 779.014705] env[61905]: DEBUG nova.network.neutron [-] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 779.041314] env[61905]: DEBUG nova.network.neutron [-] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.173073] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.173713] env[61905]: ERROR nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Traceback (most recent call last): [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.driver.spawn(context, instance, image_meta, [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] vm_ref = self.build_virtual_machine(instance, [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.173713] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] for vif in network_info: [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return self._sync_wrapper(fn, *args, **kwargs) [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.wait() [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 
090f2b46-e4f1-4b6b-b596-dd1937969007] self[:] = self._gt.wait() [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return self._exit_event.wait() [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] current.throw(*self._exc) [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 779.174010] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] result = function(*args, **kwargs) [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] return func(*args, **kwargs) [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise e [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] nwinfo = self.network_api.allocate_for_instance( [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] created_port_ids = self._update_ports_for_instance( [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] with excutils.save_and_reraise_exception(): [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] self.force_reraise() [ 779.174399] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise self.value [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 779.174706] env[61905]: 
ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] updated_port = self._update_port( [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] _ensure_no_port_binding_failure(port) [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] raise exception.PortBindingFailed(port_id=port['id']) [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] nova.exception.PortBindingFailed: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. [ 779.174706] env[61905]: ERROR nova.compute.manager [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] [ 779.174706] env[61905]: DEBUG nova.compute.utils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 779.176353] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Build of instance 090f2b46-e4f1-4b6b-b596-dd1937969007 was re-scheduled: Binding failed for port 52441f84-51c8-4aa1-a57d-51c826e780b7, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 779.176757] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 779.176977] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquiring lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.177163] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Acquired lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.177301] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.178753] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.495s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.180352] env[61905]: INFO nova.compute.claims [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.263509] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 779.293165] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.293368] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.293526] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.293711] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.293846] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.293989] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.294309] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.294615] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 779.294727] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.294875] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.295317] env[61905]: DEBUG nova.virt.hardware [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.296182] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf7748f-bb1b-4865-b839-b66f6566f17d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.305874] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08051d1-e80c-4dea-88e1-92ce525a2d32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.514606] env[61905]: ERROR nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. 
[ 779.514606] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.514606] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 779.514606] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 779.514606] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.514606] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.514606] env[61905]: ERROR nova.compute.manager raise self.value [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 779.514606] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 779.514606] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.514606] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 779.515016] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.515016] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 779.515016] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. 
[ 779.515016] env[61905]: ERROR nova.compute.manager [ 779.515016] env[61905]: Traceback (most recent call last): [ 779.515016] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 779.515016] env[61905]: listener.cb(fileno) [ 779.515016] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 779.515016] env[61905]: result = function(*args, **kwargs) [ 779.515016] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 779.515016] env[61905]: return func(*args, **kwargs) [ 779.515016] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 779.515016] env[61905]: raise e [ 779.515016] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.515016] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 779.515016] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 779.515016] env[61905]: created_port_ids = self._update_ports_for_instance( [ 779.515016] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 779.515016] env[61905]: with excutils.save_and_reraise_exception(): [ 779.515016] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.515016] env[61905]: self.force_reraise() [ 779.515016] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.515016] env[61905]: raise self.value [ 779.515016] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 779.515016] env[61905]: updated_port = self._update_port( [ 779.515016] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.515016] env[61905]: _ensure_no_port_binding_failure(port) [ 779.515016] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.515016] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 779.515681] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. [ 779.515681] env[61905]: Removing descriptor: 17 [ 779.515681] env[61905]: ERROR nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. 
[ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Traceback (most recent call last): [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] yield resources [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] self.driver.spawn(context, instance, image_meta, [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.515681] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] vm_ref = self.build_virtual_machine(instance, [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] for vif in network_info: [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] return self._sync_wrapper(fn, *args, **kwargs) [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] self.wait() [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] self[:] = self._gt.wait() [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] return self._exit_event.wait() [ 779.515937] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 779.516375] env[61905]: ERROR 
nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] result = hub.switch() [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] return self.greenlet.switch() [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] result = function(*args, **kwargs) [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] return func(*args, **kwargs) [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] raise e [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] nwinfo = self.network_api.allocate_for_instance( [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 779.516375] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] created_port_ids = self._update_ports_for_instance( [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] with excutils.save_and_reraise_exception(): [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] self.force_reraise() [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] raise self.value [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] updated_port = self._update_port( [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.516678] 
env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] _ensure_no_port_binding_failure(port) [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.516678] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] raise exception.PortBindingFailed(port_id=port['id']) [ 779.518269] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. [ 779.518269] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] [ 779.518269] env[61905]: INFO nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Terminating instance [ 779.519089] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.519252] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.519412] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 779.544130] env[61905]: DEBUG nova.network.neutron [-] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.695219] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 779.852621] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.037811] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.048042] env[61905]: INFO nova.compute.manager [-] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Took 1.03 seconds to deallocate network for instance. [ 780.050386] env[61905]: DEBUG nova.compute.claims [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 780.050559] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.163069] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.358751] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Releasing lock "refresh_cache-090f2b46-e4f1-4b6b-b596-dd1937969007" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.358751] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 780.358751] env[61905]: DEBUG nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 780.358751] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 780.385961] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 780.508922] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ed2443-6738-4181-941d-c19da52af18c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.520877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f36e8b-2096-4141-9bce-187e30038fa1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.333340] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.333340] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 781.333340] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.334831] env[61905]: DEBUG nova.network.neutron [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.335947] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9214315d-ddce-43c6-a786-48f4db73d4ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.339185] env[61905]: DEBUG nova.compute.manager [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Received event network-changed-4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 781.339185] env[61905]: DEBUG nova.compute.manager [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Refreshing instance network info cache due to event network-changed-4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 781.339185] env[61905]: DEBUG oslo_concurrency.lockutils [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] Acquiring lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.339430] env[61905]: DEBUG oslo_concurrency.lockutils [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] Acquired lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.339430] env[61905]: DEBUG nova.network.neutron [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Refreshing network info cache for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.341265] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c35da35-d02b-498c-9498-eb9af720fa85 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.351990] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664d4f6f-01bb-4ce1-bad7-48e0264ff9aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.359107] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b03581d-f22c-4279-a415-6268f5d44394 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.379792] env[61905]: DEBUG nova.compute.provider_tree [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.384935] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 79537eaa-5abf-477b-bce6-c079c9beb964 could not be found. [ 781.384935] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.385061] env[61905]: INFO nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Took 0.05 seconds to destroy the instance on the hypervisor. 
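Note: the Acquiring/Acquired/Releasing records around "refresh_cache-79537eaa-…" (lockutils.py:310/313/331) show oslo.concurrency's named-lock pattern: the terminating request and the external-event handler serialize on the same per-instance lock name before touching the network info cache. A minimal sketch of that pattern, with an illustrative lock name:

    from oslo_concurrency import lockutils

    instance_uuid = '79537eaa-5abf-477b-bce6-c079c9beb964'

    # lockutils.lock() is a context manager; entering and leaving it is what
    # emits the paired Acquiring/Acquired/Releasing DEBUG lines seen above.
    # Every code path that reads or rebuilds the cache uses the same name,
    # so concurrent refreshes of one instance's cache cannot interleave.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # refresh or read the instance network info cache here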
[ 781.385260] env[61905]: DEBUG oslo.service.loopingcall [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.386030] env[61905]: DEBUG nova.compute.manager [-] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.386030] env[61905]: DEBUG nova.network.neutron [-] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 781.406308] env[61905]: DEBUG nova.network.neutron [-] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.845530] env[61905]: INFO nova.compute.manager [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] [instance: 090f2b46-e4f1-4b6b-b596-dd1937969007] Took 1.49 seconds to deallocate network for instance. [ 781.868199] env[61905]: DEBUG nova.network.neutron [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.888069] env[61905]: DEBUG nova.scheduler.client.report [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 781.910153] env[61905]: DEBUG nova.network.neutron [-] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.954775] env[61905]: DEBUG nova.network.neutron [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.316323] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.316323]
env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.396298] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.214s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.396298] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 782.397092] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.807s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.398737] env[61905]: INFO nova.compute.claims [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.413031] env[61905]: INFO nova.compute.manager [-] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Took 1.03 seconds to deallocate network for instance.
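Note: the inventory dict logged at 781.888069 is what the resource tracker reports to Placement, and claims like the one at 782.398737 succeed as long as usage stays under the effective capacity, which per Placement's usual formula is (total - reserved) * allocation_ratio (max_unit additionally caps any single allocation). A small worked example over the logged values:

    # Effective capacity per resource class for provider
    # 9cb855ec-212a-457a-a4ff-55e9d97323b7, from the inventory logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0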
[ 782.417070] env[61905]: DEBUG nova.compute.claims [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 782.417070] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.458296] env[61905]: DEBUG oslo_concurrency.lockutils [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] Releasing lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.458296] env[61905]: DEBUG nova.compute.manager [req-56d531f4-d7e4-4dc9-83c7-d966c49ee89a req-22615a02-cf40-4f17-a187-4268ef3b35b8 service nova] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Received event network-vif-deleted-4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 782.878014] env[61905]: INFO nova.scheduler.client.report [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Deleted allocations for instance 090f2b46-e4f1-4b6b-b596-dd1937969007 [ 782.903576] env[61905]: DEBUG nova.compute.utils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 782.907623] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 782.907843] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 782.962274] env[61905]: DEBUG nova.policy [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca38ae1caae34a1a98e5b1ec3a76c03d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3708b877cd4d4699899a42fc08340653', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.237154] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Successfully created port: 589a607c-ddc7-44a8-8afc-ed70afde064f {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 783.388765] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8976fcaf-167c-42c9-8e21-169bd2b9a85b tempest-ServersTestJSON-1885687925 tempest-ServersTestJSON-1885687925-project-member] Lock "090f2b46-e4f1-4b6b-b596-dd1937969007" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 161.609s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.411225] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Start building block device mappings for instance.
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 783.690453] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0019cb20-0024-4654-980a-65a33afe8e59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.698584] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d877d334-3308-4532-b768-47a6d670bfa1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.732468] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43681b82-93c1-4ec3-ad92-1c5f0d2aa4cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.740032] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b78065-04f2-4c1a-b58d-7113b033e4a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.754519] env[61905]: DEBUG nova.compute.provider_tree [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.894162] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 784.102118] env[61905]: DEBUG nova.compute.manager [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Received event network-changed-589a607c-ddc7-44a8-8afc-ed70afde064f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.102118] env[61905]: DEBUG nova.compute.manager [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Refreshing instance network info cache due to event network-changed-589a607c-ddc7-44a8-8afc-ed70afde064f. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 784.102118] env[61905]: DEBUG oslo_concurrency.lockutils [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] Acquiring lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.102463] env[61905]: DEBUG oslo_concurrency.lockutils [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] Acquired lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.102783] env[61905]: DEBUG nova.network.neutron [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Refreshing network info cache for port 589a607c-ddc7-44a8-8afc-ed70afde064f {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.199288] env[61905]: ERROR nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. [ 784.199288] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.199288] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.199288] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.199288] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.199288] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.199288] env[61905]: ERROR nova.compute.manager raise self.value [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.199288] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 784.199288] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.199288] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 784.199722] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.199722] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 784.199722] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. [ 784.199722] env[61905]: ERROR nova.compute.manager [ 784.199722] env[61905]: Traceback (most recent call last): [ 784.199722] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 784.199722] env[61905]: listener.cb(fileno) [ 784.199722] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.199722] env[61905]: result = function(*args, **kwargs) [ 784.199722] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.199722] env[61905]: return func(*args, **kwargs) [ 784.199722] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.199722] env[61905]: raise e [ 784.199722] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.199722] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 784.199722] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.199722] env[61905]: created_port_ids = self._update_ports_for_instance( [ 784.199722] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.199722] env[61905]: with excutils.save_and_reraise_exception(): [ 784.199722] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.199722] env[61905]: self.force_reraise() [ 784.199722] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.199722] env[61905]: raise self.value [ 784.199722] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.199722] env[61905]: updated_port = self._update_port( [ 784.199722] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.199722] env[61905]: _ensure_no_port_binding_failure(port) [ 784.199722] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 784.199722] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 784.200364] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. 
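Note where this second traceback starts: eventlet's poll hub, not the request thread. _allocate_network_async runs in a greenthread, so the PortBindingFailed it raises (and re-raises at manager.py:2011) is stored and only surfaces later, when the spawn path iterates network_info and the model's _sync_wrapper calls wait() on that greenthread; that is why the same error is reported twice per instance. A minimal sketch of the deferred-failure pattern, with hypothetical function names:

    import eventlet

    def allocate_network():
        # stand-in for ComputeManager._allocate_network_async
        raise RuntimeError('Binding failed for port ...')

    gt = eventlet.spawn(allocate_network)  # the failure happens here, silently
    # ... the build continues (block device mappings, flavor checks, ...) ...
    try:
        gt.wait()  # the stored exception re-raises only at this point
    except RuntimeError as exc:
        print('surfaced at wait():', exc)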
[ 784.200364] env[61905]: Removing descriptor: 17 [ 784.257908] env[61905]: DEBUG nova.scheduler.client.report [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.415920] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.424228] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 784.448242] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.448492] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.448653] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.448830] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Flavor pref 0:0:0 {{(pid=61905) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.449028] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.449201] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.449416] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.449571] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.449732] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.449887] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.450065] env[61905]: DEBUG nova.virt.hardware [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.450908] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4e9c7a-be99-4881-bbf6-bc35c509bb97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.458911] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444494dc-e99e-42ca-8a77-7f0439ddbb39 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.471961] env[61905]: ERROR nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. 
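Note: the hardware.py records above walk from the flavor/image limits (all unset, so the 65536 defaults apply) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for this 1-vCPU m1.nano flavor. An illustrative sketch of that enumeration, not nova's exact code: every (sockets, cores, threads) factorization of the vCPU count that respects the limits.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield each (sockets, cores, threads) triple whose product equals the
        # vCPU count and that fits within the flavor/image limits.
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and (s <= max_sockets and c <= max_cores
                                       and t <= max_threads):
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log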
[ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Traceback (most recent call last): [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] yield resources [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.driver.spawn(context, instance, image_meta, [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self._vmops.spawn(context, instance, image_meta, injected_files, [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] vm_ref = self.build_virtual_machine(instance, [ 784.471961] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] vif_infos = vmwarevif.get_vif_info(self._session, [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] for vif in network_info: [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return self._sync_wrapper(fn, *args, **kwargs) [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.wait() [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self[:] = self._gt.wait() [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return self._exit_event.wait() [ 784.472277] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 784.472277] env[61905]: ERROR 
nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] current.throw(*self._exc) [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] result = function(*args, **kwargs) [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return func(*args, **kwargs) [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise e [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] nwinfo = self.network_api.allocate_for_instance( [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] created_port_ids = self._update_ports_for_instance( [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] with excutils.save_and_reraise_exception(): [ 784.472558] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.force_reraise() [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise self.value [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] updated_port = self._update_port( [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] _ensure_no_port_binding_failure(port) [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise exception.PortBindingFailed(port_id=port['id']) [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] nova.exception.PortBindingFailed: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. [ 784.472841] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] [ 784.472841] env[61905]: INFO nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Terminating instance [ 784.474236] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquiring lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.625036] env[61905]: DEBUG nova.network.neutron [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.719020] env[61905]: DEBUG nova.network.neutron [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.763135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.763663] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.770081] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.166s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.771885] env[61905]: INFO nova.compute.claims [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.221412] env[61905]: DEBUG oslo_concurrency.lockutils [req-9b463361-e16e-487f-9bd7-06cf6cc0bf45 req-28ffcfd8-c2a1-4700-a5c4-51a5bea43fc0 service nova] Releasing lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.221832] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquired lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.222034] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.276464] env[61905]: DEBUG nova.compute.utils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.281088] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 785.281254] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 785.488296] env[61905]: DEBUG nova.policy [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.749129] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.787064] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 785.943624] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.043404] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Successfully created port: de5272fe-50f3-40b4-8832-c646001368da {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.111126] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8cddd3-fc41-4297-9d88-d1294d38d000 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.120100] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067786ac-1113-4c4d-a7d7-8f634817cf87 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.150029] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d36ce00-47ad-4cf0-b1ff-29f5834583f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.157310] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81eedae5-73f9-4ca6-851f-1f0666acfa06 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.170666] env[61905]: DEBUG nova.compute.provider_tree [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.186105] env[61905]: DEBUG nova.compute.manager [req-fe6256b7-a774-4d70-98ca-8b520e5f9db6 req-25c612c6-4474-403d-9a9e-672badcb7587 service nova] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Received event network-vif-deleted-589a607c-ddc7-44a8-8afc-ed70afde064f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.449520] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Releasing lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.450031] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 786.450451] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.450574] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3d2efa2-3469-4085-82dd-64debb44c529 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.462710] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a41fc8-605e-4224-a0a3-fbf865d95663 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.487135] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ad9206a-5562-43a6-87a4-869f93b10933 could not be found. [ 786.487533] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.487645] env[61905]: INFO nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Took 0.04 seconds to destroy the instance on the hypervisor. [ 786.487898] env[61905]: DEBUG oslo.service.loopingcall [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.488143] env[61905]: DEBUG nova.compute.manager [-] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 786.488253] env[61905]: DEBUG nova.network.neutron [-] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.509918] env[61905]: DEBUG nova.network.neutron [-] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 786.677434] env[61905]: DEBUG nova.scheduler.client.report [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.808196] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.833909] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.834167] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.834320] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.834497] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.834637] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
786.834779] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.834983] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.835148] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.835307] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.835462] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.835640] env[61905]: DEBUG nova.virt.hardware [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.836839] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2ba604-34b0-4ca1-b6e5-f98172461d1e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.846656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52526435-c636-4384-b57b-abe5c03bf65a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.017045] env[61905]: DEBUG nova.network.neutron [-] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.062876] env[61905]: ERROR nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. 
[ 787.062876] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.062876] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.062876] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.062876] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.062876] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.062876] env[61905]: ERROR nova.compute.manager raise self.value [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.062876] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 787.062876] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.062876] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 787.063315] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.063315] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 787.063315] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. 
[ 787.063315] env[61905]: ERROR nova.compute.manager [ 787.063315] env[61905]: Traceback (most recent call last): [ 787.063315] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 787.063315] env[61905]: listener.cb(fileno) [ 787.063315] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.063315] env[61905]: result = function(*args, **kwargs) [ 787.063315] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 787.063315] env[61905]: return func(*args, **kwargs) [ 787.063315] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.063315] env[61905]: raise e [ 787.063315] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.063315] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 787.063315] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.063315] env[61905]: created_port_ids = self._update_ports_for_instance( [ 787.063315] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.063315] env[61905]: with excutils.save_and_reraise_exception(): [ 787.063315] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.063315] env[61905]: self.force_reraise() [ 787.063315] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.063315] env[61905]: raise self.value [ 787.063315] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.063315] env[61905]: updated_port = self._update_port( [ 787.063315] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.063315] env[61905]: _ensure_no_port_binding_failure(port) [ 787.063315] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.063315] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 787.064074] env[61905]: nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. [ 787.064074] env[61905]: Removing descriptor: 17 [ 787.064074] env[61905]: ERROR nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. 
[ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Traceback (most recent call last): [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] yield resources [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.driver.spawn(context, instance, image_meta, [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 787.064074] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] vm_ref = self.build_virtual_machine(instance, [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] vif_infos = vmwarevif.get_vif_info(self._session, [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] for vif in network_info: [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self._sync_wrapper(fn, *args, **kwargs) [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.wait() [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self[:] = self._gt.wait() [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self._exit_event.wait() [ 787.064407] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 787.064815] env[61905]: ERROR 
nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] result = hub.switch() [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self.greenlet.switch() [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] result = function(*args, **kwargs) [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return func(*args, **kwargs) [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise e [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] nwinfo = self.network_api.allocate_for_instance( [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 787.064815] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] created_port_ids = self._update_ports_for_instance( [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] with excutils.save_and_reraise_exception(): [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.force_reraise() [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise self.value [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] updated_port = self._update_port( [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.065172] 
env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] _ensure_no_port_binding_failure(port) [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.065172] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise exception.PortBindingFailed(port_id=port['id']) [ 787.065449] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. [ 787.065449] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] [ 787.065449] env[61905]: INFO nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Terminating instance [ 787.066433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.066589] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.066751] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.182209] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.182727] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 787.185268] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.545s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.519370] env[61905]: INFO nova.compute.manager [-] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Took 1.03 seconds to deallocate network for instance. [ 787.521806] env[61905]: DEBUG nova.compute.claims [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 787.521984] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.582294] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.653676] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.689588] env[61905]: DEBUG nova.compute.utils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.690917] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 787.691106] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 787.752066] env[61905]: DEBUG nova.policy [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c384eecf00f94ac8b14dc9374c3cc07c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '087dc5ebfc47453eb42a96a28550b39d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 787.983023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5bb2fe-a939-4d8a-86c0-fa77df58cd42 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.989700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd9fdf4-6ffc-4087-9690-c0d67926238e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.020992] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe2edcd-4e39-4de6-8541-fadff880ce45 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.029849] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413e959d-3275-4b59-a2a0-0684eecfe610 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.042251] env[61905]: DEBUG nova.compute.provider_tree [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.082909] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Successfully created port: 097dded6-754c-4031-8dd4-acb71be06d83 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 788.156655] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.157271] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 
tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 788.157271] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.157570] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d473aadd-9c87-4e86-92d3-52b9d362a32c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.167691] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1d445b-270c-4fcc-84f6-0d7a4f370aa2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.191331] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0bed6b1c-237b-469d-9f9b-0c4c84550ffb could not be found. [ 788.192129] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.192129] env[61905]: INFO nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Took 0.03 seconds to destroy the instance on the hypervisor. [ 788.192216] env[61905]: DEBUG oslo.service.loopingcall [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.192379] env[61905]: DEBUG nova.compute.manager [-] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 788.192755] env[61905]: DEBUG nova.network.neutron [-] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.197149] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 788.228970] env[61905]: DEBUG nova.network.neutron [-] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.243166] env[61905]: DEBUG nova.compute.manager [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Received event network-changed-de5272fe-50f3-40b4-8832-c646001368da {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.243166] env[61905]: DEBUG nova.compute.manager [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Refreshing instance network info cache due to event network-changed-de5272fe-50f3-40b4-8832-c646001368da. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 788.243166] env[61905]: DEBUG oslo_concurrency.lockutils [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] Acquiring lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.243166] env[61905]: DEBUG oslo_concurrency.lockutils [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] Acquired lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.243558] env[61905]: DEBUG nova.network.neutron [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Refreshing network info cache for port de5272fe-50f3-40b4-8832-c646001368da {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.545647] env[61905]: DEBUG nova.scheduler.client.report [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.735527] env[61905]: DEBUG nova.network.neutron [-] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.774353] env[61905]: DEBUG nova.network.neutron [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.951187] env[61905]: DEBUG nova.network.neutron [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.054710] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.055820] env[61905]: ERROR nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Traceback (most recent call last): [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.driver.spawn(context, instance, image_meta, [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] vm_ref = self.build_virtual_machine(instance, [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 789.055820] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] for vif in network_info: [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self._sync_wrapper(fn, *args, **kwargs) [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 
789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.wait() [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self[:] = self._gt.wait() [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self._exit_event.wait() [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] result = hub.switch() [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 789.056319] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return self.greenlet.switch() [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] result = function(*args, **kwargs) [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] return func(*args, **kwargs) [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise e [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] nwinfo = self.network_api.allocate_for_instance( [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] created_port_ids = self._update_ports_for_instance( [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] with excutils.save_and_reraise_exception(): [ 789.056842] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] self.force_reraise() [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise self.value [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] updated_port = self._update_port( [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] _ensure_no_port_binding_failure(port) [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] raise exception.PortBindingFailed(port_id=port['id']) [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] nova.exception.PortBindingFailed: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. [ 789.057620] env[61905]: ERROR nova.compute.manager [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] [ 789.058117] env[61905]: DEBUG nova.compute.utils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 789.062719] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Build of instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac was re-scheduled: Binding failed for port b81d5da7-8d7e-4da1-83af-badb397ecd37, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 789.062719] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 789.062719] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.062719] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.062719] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.062983] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.849s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.210214] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 789.240479] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 789.240724] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 789.240879] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.241064] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 789.241202] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.241341] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 789.241539] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 789.241691] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 789.241849] 
env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 789.241999] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 789.245083] env[61905]: DEBUG nova.virt.hardware [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 789.245083] env[61905]: INFO nova.compute.manager [-] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Took 1.05 seconds to deallocate network for instance. [ 789.245083] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40877115-c99b-497e-9d98-f802352f1dd7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.248321] env[61905]: DEBUG nova.compute.claims [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 789.248494] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.254458] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fade2580-8954-4400-9c8e-12e8887bf4a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.453872] env[61905]: DEBUG oslo_concurrency.lockutils [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] Releasing lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.454219] env[61905]: DEBUG nova.compute.manager [req-8fa9e986-b104-4218-b61b-71b651893d0f req-2550fdc0-0c79-49f2-8446-afb726001d4e service nova] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Received event network-vif-deleted-de5272fe-50f3-40b4-8832-c646001368da {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.605423] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.767940] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.919536] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80615d70-f079-4474-a2b6-35bbe79b66fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.927386] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd093bc-6e43-4644-8468-edfb588819b9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.963733] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473f26de-cec7-44ef-940e-a71989cb15e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.974248] env[61905]: ERROR nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. [ 789.974248] env[61905]: ERROR nova.compute.manager Traceback (most recent call last): [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 789.974248] env[61905]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 789.974248] env[61905]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 789.974248] env[61905]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 789.974248] env[61905]: ERROR nova.compute.manager self.force_reraise() [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 789.974248] env[61905]: ERROR nova.compute.manager raise self.value [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 789.974248] env[61905]: ERROR nova.compute.manager updated_port = self._update_port( [ 789.974248] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 789.974248] env[61905]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 789.974683] env[61905]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
789.974683] env[61905]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 789.974683] env[61905]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. [ 789.974683] env[61905]: ERROR nova.compute.manager [ 789.974787] env[61905]: Traceback (most recent call last): [ 789.974815] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 789.974815] env[61905]: listener.cb(fileno) [ 789.974815] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 789.974815] env[61905]: result = function(*args, **kwargs) [ 789.974815] env[61905]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 789.974815] env[61905]: return func(*args, **kwargs) [ 789.974956] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 789.974956] env[61905]: raise e [ 789.974956] env[61905]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 789.974956] env[61905]: nwinfo = self.network_api.allocate_for_instance( [ 789.974956] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 789.974956] env[61905]: created_port_ids = self._update_ports_for_instance( [ 789.974956] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 789.974956] env[61905]: with excutils.save_and_reraise_exception(): [ 789.974956] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 789.974956] env[61905]: self.force_reraise() [ 789.974956] env[61905]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 789.974956] env[61905]: raise self.value [ 789.974956] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 789.974956] env[61905]: updated_port = self._update_port( [ 789.974956] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 789.974956] env[61905]: _ensure_no_port_binding_failure(port) [ 789.974956] env[61905]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 789.974956] env[61905]: raise exception.PortBindingFailed(port_id=port['id']) [ 789.974956] env[61905]: nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. [ 789.974956] env[61905]: Removing descriptor: 17 [ 789.976519] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edda05a4-3e15-4774-a908-5d39f65ed216 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.981083] env[61905]: ERROR nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. 
[ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Traceback (most recent call last): [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] yield resources [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.driver.spawn(context, instance, image_meta, [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] vm_ref = self.build_virtual_machine(instance, [ 789.981083] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] vif_infos = vmwarevif.get_vif_info(self._session, [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] for vif in network_info: [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self._sync_wrapper(fn, *args, **kwargs) [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.wait() [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self[:] = self._gt.wait() [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self._exit_event.wait() [ 789.981430] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 789.981430] env[61905]: ERROR 
nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] result = hub.switch() [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self.greenlet.switch() [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] result = function(*args, **kwargs) [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return func(*args, **kwargs) [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise e [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] nwinfo = self.network_api.allocate_for_instance( [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] created_port_ids = self._update_ports_for_instance( [ 789.981772] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] with excutils.save_and_reraise_exception(): [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.force_reraise() [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise self.value [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] updated_port = self._update_port( [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 789.982188] 
env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] _ensure_no_port_binding_failure(port) [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise exception.PortBindingFailed(port_id=port['id']) [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. [ 789.982188] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] [ 789.982523] env[61905]: INFO nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Terminating instance [ 789.986134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.986134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.986134] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.994649] env[61905]: DEBUG nova.compute.provider_tree [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.024508] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.181744] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.269714] env[61905]: DEBUG nova.compute.manager [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Received event network-changed-097dded6-754c-4031-8dd4-acb71be06d83 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 790.269842] env[61905]: DEBUG nova.compute.manager [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Refreshing instance network info cache due to event network-changed-097dded6-754c-4031-8dd4-acb71be06d83. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 790.270104] env[61905]: DEBUG oslo_concurrency.lockutils [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] Acquiring lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.272252] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-9222cafc-fcee-40b9-b6c3-f1cf677324ac" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.273335] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 790.273335] env[61905]: DEBUG nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.273335] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.293946] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.500359] env[61905]: DEBUG nova.scheduler.client.report [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 790.685809] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.686336] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 790.686547] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.686938] env[61905]: DEBUG oslo_concurrency.lockutils [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] Acquired lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.687132] env[61905]: DEBUG nova.network.neutron [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Refreshing network info cache for port 097dded6-754c-4031-8dd4-acb71be06d83 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.689228] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb76c92f-1584-4a28-8feb-9d32941d7865 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.700139] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4bc2342-7ba9-4367-98cb-1aea25f1fb74 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.723683] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 
8ef98f37-9059-4658-9679-fb50dc812eb5 could not be found. [ 790.723905] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.724098] env[61905]: INFO nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 790.724347] env[61905]: DEBUG oslo.service.loopingcall [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.724578] env[61905]: DEBUG nova.compute.manager [-] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 790.724672] env[61905]: DEBUG nova.network.neutron [-] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.741818] env[61905]: DEBUG nova.network.neutron [-] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 790.796791] env[61905]: DEBUG nova.network.neutron [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.004817] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.943s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.005463] env[61905]: ERROR nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. 
[ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Traceback (most recent call last): [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.driver.spawn(context, instance, image_meta, [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] vm_ref = self.build_virtual_machine(instance, [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.005463] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] for vif in network_info: [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return self._sync_wrapper(fn, *args, **kwargs) [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.wait() [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self[:] = self._gt.wait() [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return self._exit_event.wait() [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] current.throw(*self._exc) [ 791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
791.005994] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] result = function(*args, **kwargs) [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] return func(*args, **kwargs) [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise e [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] nwinfo = self.network_api.allocate_for_instance( [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] created_port_ids = self._update_ports_for_instance( [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] with excutils.save_and_reraise_exception(): [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] self.force_reraise() [ 791.006340] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise self.value [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] updated_port = self._update_port( [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] _ensure_no_port_binding_failure(port) [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] raise exception.PortBindingFailed(port_id=port['id']) [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] nova.exception.PortBindingFailed: Binding failed for 
port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. [ 791.006661] env[61905]: ERROR nova.compute.manager [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] [ 791.006661] env[61905]: DEBUG nova.compute.utils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 791.007886] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.062s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.011867] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Build of instance 1b57ca4b-a8b9-497f-bc81-71c31510093e was re-scheduled: Binding failed for port 231af9a2-1f63-4de2-877b-d007606490f4, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 791.012985] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 791.013258] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquiring lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.013409] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Acquired lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.013570] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.215206] env[61905]: DEBUG nova.network.neutron [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.245595] env[61905]: DEBUG nova.network.neutron [-] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.302015] env[61905]: INFO nova.compute.manager [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 9222cafc-fcee-40b9-b6c3-f1cf677324ac] Took 1.03 seconds to deallocate network for instance. [ 791.333351] env[61905]: DEBUG nova.network.neutron [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.538887] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.714848] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.748313] env[61905]: INFO nova.compute.manager [-] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Took 1.02 seconds to deallocate network for instance. 
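Note: every PortBindingFailed traceback above funnels through oslo_utils.excutils.save_and_reraise_exception(), which is why each stack ends with force_reraise() and "raise self.value" before the original exception surfaces. The following minimal, self-contained Python sketch reproduces that idiom (assuming oslo.utils is installed); the PortBindingFailed class and the _ensure_no_port_binding_failure() helper here are illustrative stand-ins, not Nova's actual code.

    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""


    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding as binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed('Binding failed for port %s' % port['id'])


    def update_port(port):
        try:
            _ensure_no_port_binding_failure(port)
        except Exception:
            # save_and_reraise_exception() captures the in-flight exception,
            # runs the cleanup body, then re-raises the original via
            # force_reraise() -- producing the force_reraise() and
            # "raise self.value" frames seen in the tracebacks above.
            with excutils.save_and_reraise_exception():
                print('cleaning up port %s before re-raising' % port['id'])


    try:
        update_port({'id': '097dded6-754c-4031-8dd4-acb71be06d83',
                     'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print('caught: %s' % exc)
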
[ 791.753691] env[61905]: DEBUG nova.compute.claims [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Aborting claim: {{(pid=61905) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 791.753889] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.835520] env[61905]: DEBUG oslo_concurrency.lockutils [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] Releasing lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.835783] env[61905]: DEBUG nova.compute.manager [req-714a5440-2502-41d6-8fd9-534f182edf83 req-09c13c02-f5ca-4cd9-8fb9-c982cebc3723 service nova] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Received event network-vif-deleted-097dded6-754c-4031-8dd4-acb71be06d83 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 792.217187] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Releasing lock "refresh_cache-1b57ca4b-a8b9-497f-bc81-71c31510093e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.217432] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 792.217615] env[61905]: DEBUG nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 792.217839] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.233045] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.334303] env[61905]: INFO nova.scheduler.client.report [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac [ 792.558032] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9222cafc-fcee-40b9-b6c3-f1cf677324ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.735766] env[61905]: DEBUG nova.network.neutron [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.843866] env[61905]: DEBUG oslo_concurrency.lockutils [None req-31172074-92b5-4c84-828f-d19ce93e7cb0 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "9222cafc-fcee-40b9-b6c3-f1cf677324ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 168.855s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.024740] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.025052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.060964] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1b57ca4b-a8b9-497f-bc81-71c31510093e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.060964] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.061097] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.061912] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 79537eaa-5abf-477b-bce6-c079c9beb964 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.061912] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 3ad9206a-5562-43a6-87a4-869f93b10933 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.061912] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0bed6b1c-237b-469d-9f9b-0c4c84550ffb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.061912] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 8ef98f37-9059-4658-9679-fb50dc812eb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 793.238908] env[61905]: INFO nova.compute.manager [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] [instance: 1b57ca4b-a8b9-497f-bc81-71c31510093e] Took 1.02 seconds to deallocate network for instance. [ 793.348310] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 793.564707] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 60e68738-a333-44b2-a1e8-0b3da728059e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.877480] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.067920] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a4a03b8a-3206-4684-9d85-0e60ac643175 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.264551] env[61905]: INFO nova.scheduler.client.report [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Deleted allocations for instance 1b57ca4b-a8b9-497f-bc81-71c31510093e [ 794.415229] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "e3b11ed6-b703-43a6-a528-28520ed43233" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.415456] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.571227] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d31570f0-7662-4e13-9dee-51dc66728acc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.775699] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ba08035-d1c9-4f96-811b-b2207eba16a7 tempest-AttachInterfacesUnderV243Test-459804726 tempest-AttachInterfacesUnderV243Test-459804726-project-member] Lock "1b57ca4b-a8b9-497f-bc81-71c31510093e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 166.873s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.073759] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance fed05097-de84-4617-bf9e-7fc116ebc56e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.278937] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 795.576866] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ba3a1e36-a9f8-4482-908e-9c949c6f42ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.802821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.080694] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance aeb72a57-d319-479d-a1c7-3cebc6f73f09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.583506] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 74f94a46-63e4-44e0-9142-7e7d46cd31a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.087141] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 111d10e8-7e36-48b6-be45-2275c36fbee4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.594870] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 12c21d8e-1941-4481-9216-015ba6c09b9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.097938] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a9ac365e-2be1-438d-a514-6fa7b26fa10c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 798.601129] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e1a22f3e-4557-44d2-8e34-cc75f573fe41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.103838] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a6e45dd1-e0ee-4bda-9513-4b1000e15e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.607217] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9a385d72-ba5d-48e0-b71f-d37d4e63c403 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.110027] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 4bb7a2df-b472-4f6d-8a01-a55d0b86efda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.110539] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 800.110539] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 800.374205] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fba27be-cdc2-4d8f-a8e5-acfebf85a734 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.382265] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2628418b-01b1-440f-984d-8f94b49d5b9b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.411474] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e37bea-0732-4e35-ab98-d51a54c41e01 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.418865] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1bc425-bec2-4e66-87ac-cd53d2a0c4b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.433039] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.937082] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.441860] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 801.442143] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.434s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.442424] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.978s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.445448] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.445596] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Cleaning up deleted instances {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 801.949956] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] There are 5 instances to clean {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 801.950270] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 84428003-72b1-467a-baf5-06ac37205622] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 802.184210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69fb500d-0b9c-4df0-9778-77bddc705685 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.191455] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5521df59-9276-47db-a8ce-8e5ce14a9399 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.220136] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605c038e-146c-4a8e-8522-a55dd84eb5b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.226993] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2157b42d-96bb-4a0d-9a3c-a35778544dc9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.240479] env[61905]: DEBUG nova.compute.provider_tree [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.457666] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e6a063b4-d4f8-46ae-89ae-2d66637896ae] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 802.743725] env[61905]: DEBUG nova.scheduler.client.report [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.960794] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9199119-9d4e-4b04-8675-22f6680da8b1] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 803.248166] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.806s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.248863] env[61905]: ERROR nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Traceback (most recent call last): [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.driver.spawn(context, instance, image_meta, [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] vm_ref = self.build_virtual_machine(instance, [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.248863] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] for vif in network_info: [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.249390] env[61905]: ERROR 
nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self._sync_wrapper(fn, *args, **kwargs) [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.wait() [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self[:] = self._gt.wait() [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self._exit_event.wait() [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] result = hub.switch() [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 803.249390] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return self.greenlet.switch() [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] result = function(*args, **kwargs) [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] return func(*args, **kwargs) [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise e [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] nwinfo = self.network_api.allocate_for_instance( [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] created_port_ids = self._update_ports_for_instance( [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 803.249734] env[61905]: ERROR nova.compute.manager 
[instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] with excutils.save_and_reraise_exception(): [ 803.249734] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] self.force_reraise() [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise self.value [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] updated_port = self._update_port( [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] _ensure_no_port_binding_failure(port) [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] raise exception.PortBindingFailed(port_id=port['id']) [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] nova.exception.PortBindingFailed: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. [ 803.250102] env[61905]: ERROR nova.compute.manager [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] [ 803.250390] env[61905]: DEBUG nova.compute.utils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. 
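{{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}

The traceback above is the canonical PortBindingFailed path: allocate_for_instance() walks down through _update_ports_for_instance() to _update_port(), which calls _ensure_no_port_binding_failure() (nova/network/neutron.py:294) and raises as soon as Neutron reports the port's binding as failed. A minimal sketch of that guard, assuming only the standard Neutron port fields implied by the traceback (the real helper lives in nova.network.neutron):

```python
# Sketch of the guard at the bottom of the traceback above
# (nova/network/neutron.py:294). Field names follow the standard Neutron
# port API; the exception message mirrors the one logged here.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, please check "
                         f"neutron logs for more information.")

def _ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron marks a port that no ML2 mechanism driver could bind with
    # binding:vif_type == 'binding_failed'; any other value passes through.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])

# A port updated on a host where binding failed, as in this trace:
port = {'id': '4fe9bda5-26c4-47c0-b3b8-87292950204d',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 4fe9bda5-..., please check ...
```

As the entries that follow show, the compute manager reacts by aborting the resource claim, deleting the placement allocation, and re-scheduling the build.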
[ 803.251092] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.942s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.252270] env[61905]: INFO nova.compute.claims [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.255033] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Build of instance 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8 was re-scheduled: Binding failed for port 4fe9bda5-26c4-47c0-b3b8-87292950204d, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 803.255323] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 803.255544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquiring lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.255714] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Acquired lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.255883] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 803.464166] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 2a8bcc04-5519-4890-839b-64dcf422526d] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 803.776519] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.832692] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.967683] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 72770472-1b79-4408-b32c-34e56fd27c45] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 804.335437] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Releasing lock "refresh_cache-1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.335667] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 804.335829] env[61905]: DEBUG nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 804.335996] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.351057] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.471461] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.471461] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Cleaning up deleted instances with incomplete migration {{(pid=61905) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 804.491989] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359c3869-4da8-4976-8148-0cdc7f3c7f91 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.499998] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c030e7af-db98-4093-a23b-c759690ee404 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.532017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e57a7b-4654-4fe2-9e1e-aeb0eb8e7157 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.539495] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ad78f3-3493-4b87-a73c-a8f1b346dbcd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.552550] env[61905]: DEBUG nova.compute.provider_tree [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.853326] env[61905]: DEBUG nova.network.neutron [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.976064] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.055443] env[61905]: DEBUG nova.scheduler.client.report [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider 
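/opt/stack/nova/nova/scheduler/client/report.py:954}}

The inventory dict in the set_inventory_for_provider entries above is what determines how much capacity placement will hand out: for each resource class the usable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check of the logged numbers (a sketch; the formula is the standard Placement capacity calculation):

```python
# Capacity implied by the inventory dict logged above; the
# (total - reserved) * allocation_ratio formula is the one the Placement
# service applies per resource class.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity for allocations = {capacity:g}")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- so the 6 allocated vcpus in
# the "Final resource view" entries leave ample headroom, which is why the
# repeated "Inventory has not changed" reports are the expected steady state.
```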
[ 805.355907] env[61905]: INFO nova.compute.manager [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] [instance: 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8] Took 1.02 seconds to deallocate network for instance. [ 805.561044] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.310s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.561594] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 805.564088] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.513s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.920434] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.920666] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.920812] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 806.069257] env[61905]: DEBUG nova.compute.utils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.073678] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 806.073859] env[61905]: DEBUG nova.network.neutron [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.116625] env[61905]: DEBUG nova.policy [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c384eecf00f94ac8b14dc9374c3cc07c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '087dc5ebfc47453eb42a96a28550b39d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 806.314697] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1789a731-b867-44be-85de-87a4cbd38b56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.322793] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c677fdc3-a1c2-45c6-bb15-77f955b5bdf6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.352470] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9ccbb0-aeb0-432f-9d56-feac7709e7d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.360126] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475e287a-bfeb-4de8-a83b-acee7464f0ff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.375638] env[61905]: DEBUG nova.compute.provider_tree [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.401652] env[61905]: INFO nova.scheduler.client.report [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Deleted allocations for instance 1232fcf1-4da1-4e1f-b693-8f97f19e4ea8 [ 806.415066] env[61905]: DEBUG nova.network.neutron [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Successfully created port: 257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.423132] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 806.423358] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.423521] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.423668] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.423815] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.423955] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.424119] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.424244] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 806.424406] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.574843] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.880619] env[61905]: DEBUG nova.scheduler.client.report [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.907153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39b738f6-7e90-40ea-86b3-79d063f6df1e tempest-ServerDiagnosticsNegativeTest-489453464 tempest-ServerDiagnosticsNegativeTest-489453464-project-member] Lock "1232fcf1-4da1-4e1f-b693-8f97f19e4ea8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 177.039s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.930664] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.388028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.823s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.388028] env[61905]: ERROR nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. 
[ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Traceback (most recent call last): [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.driver.spawn(context, instance, image_meta, [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.388028] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] vm_ref = self.build_virtual_machine(instance, [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] for vif in network_info: [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return self._sync_wrapper(fn, *args, **kwargs) [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.wait() [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self[:] = self._gt.wait() [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return self._exit_event.wait() [ 807.388420] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] current.throw(*self._exc) [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] result = function(*args, **kwargs) [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] return func(*args, **kwargs) [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise e [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] nwinfo = self.network_api.allocate_for_instance( [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] created_port_ids = self._update_ports_for_instance( [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 807.388734] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] with excutils.save_and_reraise_exception(): [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] self.force_reraise() [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise self.value [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] updated_port = self._update_port( [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] _ensure_no_port_binding_failure(port) [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] raise exception.PortBindingFailed(port_id=port['id']) [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] nova.exception.PortBindingFailed: Binding failed for 
port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. [ 807.389105] env[61905]: ERROR nova.compute.manager [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] [ 807.389421] env[61905]: DEBUG nova.compute.utils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 807.390726] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.974s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.394029] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Build of instance d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14 was re-scheduled: Binding failed for port 8fe0ee29-a1ac-4179-8359-d2b2408f3a2e, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 807.394299] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 807.394524] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquiring lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.394666] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Acquired lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.394819] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.409896] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 807.585506] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 807.611671] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.611671] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.611829] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.611966] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.612129] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.612274] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.612475] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 807.612627] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.612783] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.612937] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.613119] env[61905]: DEBUG nova.virt.hardware [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.614288] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09108d4-d2e8-4edc-b0d8-6ac009fbf92f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.622045] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0705a2-db19-4f1a-92f1-545026cb6a34 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.928025] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.938162] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.954163] env[61905]: DEBUG nova.compute.manager [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Received event network-vif-plugged-257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.954377] env[61905]: DEBUG oslo_concurrency.lockutils [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] Acquiring lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.954642] env[61905]: DEBUG oslo_concurrency.lockutils [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] Lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.954817] env[61905]: DEBUG oslo_concurrency.lockutils [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] Lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.954984] env[61905]: DEBUG nova.compute.manager [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] No waiting events found dispatching network-vif-plugged-257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 807.955154] env[61905]: WARNING nova.compute.manager [req-e9ff518e-82db-442b-8076-5a776c924ba8 req-616d272e-405c-41ab-9811-e5880f66f143 service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Received unexpected event network-vif-plugged-257550cb-7da2-4dee-9d64-19c0c56c22e1 for instance with vm_state building and task_state spawning. 
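The WARNING about "Received unexpected event network-vif-plugged-257550cb-..." above is a benign ordering race: Neutron reported the port as plugged before the spawning thread had registered a waiter for that event, so pop_instance_event() found no waiting event to dispatch and the manager dropped it. A rough sketch of the prepare/pop waiter pattern behind those "-events" lock entries (names here are illustrative, not Nova's exact API):

```python
# Toy version of the prepare/pop pattern implied by the entries above:
# events that arrive before anyone is waiting are reported as unexpected.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_id, event_name) -> threading.Event

    def prepare(self, instance_id, event_name):
        # Called by the spawning thread *before* it starts waiting.
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_id, event_name)] = ev
            return ev

    def pop(self, instance_id, event_name):
        # Called when the external event arrives from Neutron.
        with self._lock:
            return self._waiters.pop((instance_id, event_name), None)

registry = InstanceEvents()
# The event arrives before the waiter was registered:
if registry.pop('60e68738', 'network-vif-plugged') is None:
    print("Received unexpected event network-vif-plugged ...")
```

In this trace the race is harmless: the very next entries show port 257550cb-7da2-4dee-9d64-19c0c56c22e1 being updated successfully while the instance is still spawning.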
[ 808.054311] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.123782] env[61905]: DEBUG nova.network.neutron [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Successfully updated port: 257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.194668] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e2d2d1-cd67-4f67-bb02-14722ec1e6c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.202423] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28f80a8-b8f1-4657-9bc3-bddb79274c31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.233877] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f218da2-83b1-456b-acc2-a5bc511496dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.242149] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca442ec-0e72-4465-a57c-c668a414d0fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.254933] env[61905]: DEBUG nova.compute.provider_tree [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.559616] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Releasing lock "refresh_cache-d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.559870] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 808.560062] env[61905]: DEBUG nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 808.560237] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.583783] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.626738] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.627019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.627019] env[61905]: DEBUG nova.network.neutron [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 808.758545] env[61905]: DEBUG nova.scheduler.client.report [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.087056] env[61905]: DEBUG nova.network.neutron [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.174447] env[61905]: DEBUG nova.network.neutron [None 
req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 809.264673] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 809.265568] env[61905]: ERROR nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information.
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Traceback (most recent call last):
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     self.driver.spawn(context, instance, image_meta,
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     vm_ref = self.build_virtual_machine(instance,
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     vif_infos = vmwarevif.get_vif_info(self._session,
[ 809.265568] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     for vif in network_info:
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     return self._sync_wrapper(fn, *args, **kwargs)
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     self.wait()
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     self[:] = self._gt.wait()
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     return self._exit_event.wait()
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     result = hub.switch()
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 809.265958] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     return self.greenlet.switch()
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     result = function(*args, **kwargs)
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     return func(*args, **kwargs)
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     raise e
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     nwinfo = self.network_api.allocate_for_instance(
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     created_port_ids = self._update_ports_for_instance(
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     with excutils.save_and_reraise_exception():
[ 809.266279] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     self.force_reraise()
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     raise self.value
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     updated_port = self._update_port(
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     _ensure_no_port_binding_failure(port)
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]   File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]     raise exception.PortBindingFailed(port_id=port['id'])
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] nova.exception.PortBindingFailed: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information.
[ 809.266567] env[61905]: ERROR nova.compute.manager [instance: 79537eaa-5abf-477b-bce6-c079c9beb964]
[ 809.266827] env[61905]: DEBUG nova.compute.utils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 809.267786] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.852s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 809.269253] env[61905]: INFO nova.compute.claims [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 809.272864] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Build of instance 79537eaa-5abf-477b-bce6-c079c9beb964 was re-scheduled: Binding failed for port 4dfa77bf-0701-45ee-a9c0-e35ba87eb6e3, please check neutron logs for more information.
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 809.273231] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 809.273463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.273855] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.273947] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.360260] env[61905]: DEBUG nova.network.neutron [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updating instance_info_cache with network_info: [{"id": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "address": "fa:16:3e:75:fc:a1", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap257550cb-7d", "ovs_interfaceid": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.590720] env[61905]: INFO nova.compute.manager [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] [instance: d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14] Took 1.03 seconds to deallocate network for instance. 
[ 809.800587] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.862676] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.863370] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance network_info: |[{"id": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "address": "fa:16:3e:75:fc:a1", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap257550cb-7d", "ovs_interfaceid": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 809.863773] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:fc:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62d6a386-ffdb-4232-83f3-cb21c5e59e85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '257550cb-7da2-4dee-9d64-19c0c56c22e1', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 809.874660] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Creating folder: Project (087dc5ebfc47453eb42a96a28550b39d). Parent ref: group-v289968. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.875083] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-620e6cc4-5cbe-4a2b-922d-282f5fa95dd4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.889542] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Created folder: Project (087dc5ebfc47453eb42a96a28550b39d) in parent group-v289968. [ 809.893019] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Creating folder: Instances. Parent ref: group-v289993. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 809.893019] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99889920-498d-4680-b92e-cf7ecc59bf8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.900567] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Created folder: Instances in parent group-v289993. [ 809.900884] env[61905]: DEBUG oslo.service.loopingcall [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.901788] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.903073] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 809.903322] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-054ce99a-63fd-4322-815d-71a792730e04 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.926072] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.926072] env[61905]: value = "task-1362359" [ 809.926072] env[61905]: _type = "Task" [ 809.926072] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.934657] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362359, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.978339] env[61905]: DEBUG nova.compute.manager [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Received event network-changed-257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.978534] env[61905]: DEBUG nova.compute.manager [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Refreshing instance network info cache due to event network-changed-257550cb-7da2-4dee-9d64-19c0c56c22e1. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 809.978764] env[61905]: DEBUG oslo_concurrency.lockutils [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] Acquiring lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.978915] env[61905]: DEBUG oslo_concurrency.lockutils [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] Acquired lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.979113] env[61905]: DEBUG nova.network.neutron [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Refreshing network info cache for port 257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.421993] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "refresh_cache-79537eaa-5abf-477b-bce6-c079c9beb964" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.422106] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 810.422291] env[61905]: DEBUG nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.422457] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.441462] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362359, 'name': CreateVM_Task, 'duration_secs': 0.321139} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.442267] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.443426] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 810.450622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.450789] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.451109] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 810.451356] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65afb963-b8e6-48b8-bb54-191f20b92c00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.456975] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 810.456975] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52621c8c-c5db-7b22-066c-2bf363dfc35c" [ 810.456975] env[61905]: _type = "Task" [ 810.456975] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.468460] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52621c8c-c5db-7b22-066c-2bf363dfc35c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.579542] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8afc4d5-8ef1-46ca-8c4b-a806b78c8b6a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.591208] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97be8b73-ff7b-434b-99bf-027ec83ea52a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.626898] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982f9af2-e139-4300-ac09-2b2c3d23e8ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.635166] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3a2d11-785a-4e69-a8d2-3fdd123089af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.651134] env[61905]: DEBUG nova.compute.provider_tree [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.654080] env[61905]: INFO nova.scheduler.client.report [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Deleted allocations for instance d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14 [ 810.789612] env[61905]: DEBUG nova.network.neutron [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updated VIF entry in instance network info cache for port 257550cb-7da2-4dee-9d64-19c0c56c22e1. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 810.789733] env[61905]: DEBUG nova.network.neutron [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updating instance_info_cache with network_info: [{"id": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "address": "fa:16:3e:75:fc:a1", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap257550cb-7d", "ovs_interfaceid": "257550cb-7da2-4dee-9d64-19c0c56c22e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.944571] env[61905]: DEBUG nova.network.neutron [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.969138] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52621c8c-c5db-7b22-066c-2bf363dfc35c, 'name': SearchDatastore_Task, 'duration_secs': 0.010708} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.969707] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.969791] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.970062] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.970256] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.970979] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.970979] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25b24c08-1dc9-4ac9-9bf9-72b679c0ea95 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.979388] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.979562] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.980277] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3cdf0bc-86ef-40db-8342-9a362079c928 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.985318] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 810.985318] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5254ee91-bd01-d74b-575b-d8c8166bcb7a" [ 810.985318] env[61905]: _type = "Task" [ 810.985318] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.994557] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5254ee91-bd01-d74b-575b-d8c8166bcb7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.165483] env[61905]: DEBUG nova.scheduler.client.report [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 811.170351] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e67395b-04d7-4ba1-b863-61dcc79d9a01 tempest-ServerActionsTestJSON-1075891075 tempest-ServerActionsTestJSON-1075891075-project-member] Lock "d04ce811-ce0b-4cd3-9eb0-9ad8ed402c14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 161.529s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.178051] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "38b80675-182a-422c-9222-aa78ed59c351" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.178305] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.212576] 
env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "2c919b69-0e09-431d-8a75-98d5740c7dab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.212804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.292555] env[61905]: DEBUG oslo_concurrency.lockutils [req-16ac885b-1d6c-4c71-94d0-11313db24987 req-a946bec6-2f35-4ccd-a40a-2ed8be52e3ae service nova] Releasing lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.451054] env[61905]: INFO nova.compute.manager [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 79537eaa-5abf-477b-bce6-c079c9beb964] Took 1.03 seconds to deallocate network for instance. [ 811.499393] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5254ee91-bd01-d74b-575b-d8c8166bcb7a, 'name': SearchDatastore_Task, 'duration_secs': 0.012019} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.502403] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c343e659-111b-4080-acff-410cfccf959d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.507634] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 811.507634] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290277f-1d6c-ce22-8661-337b8fac4377" [ 811.507634] env[61905]: _type = "Task" [ 811.507634] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.519251] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290277f-1d6c-ce22-8661-337b8fac4377, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.672647] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.673230] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 811.675961] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 811.678477] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.156s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.019418] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5290277f-1d6c-ce22-8661-337b8fac4377, 'name': SearchDatastore_Task, 'duration_secs': 0.010821} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.019702] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.019918] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 60e68738-a333-44b2-a1e8-0b3da728059e/60e68738-a333-44b2-a1e8-0b3da728059e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 812.020178] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05d18880-3efe-4db6-b4c1-60a711573341 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.026333] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 812.026333] env[61905]: value = "task-1362360" [ 812.026333] env[61905]: _type = "Task" [ 812.026333] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.034205] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.180517] env[61905]: DEBUG nova.compute.utils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 812.182124] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 812.182296] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 812.213508] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.263317] env[61905]: DEBUG nova.policy [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c384eecf00f94ac8b14dc9374c3cc07c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '087dc5ebfc47453eb42a96a28550b39d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 812.492142] env[61905]: INFO nova.scheduler.client.report [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted allocations for instance 79537eaa-5abf-477b-bce6-c079c9beb964 [ 812.537669] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506723} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.538089] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 60e68738-a333-44b2-a1e8-0b3da728059e/60e68738-a333-44b2-a1e8-0b3da728059e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 812.538193] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 812.538646] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-989fa3e5-6bc5-4e67-a001-f2ef5759606a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.548107] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 812.548107] env[61905]: value = "task-1362361" [ 812.548107] env[61905]: _type = "Task" [ 812.548107] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.556043] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362361, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.595955] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100be766-e827-44ec-b08e-9571833f37b2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.604669] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a36befd-8a88-4c21-b0bd-18a752a365a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.637019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd2ef44-9cb7-40f4-9c31-9207f31c3313 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.645384] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6fd214-28da-4114-817d-93d6dab19316 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.661032] env[61905]: DEBUG nova.compute.provider_tree [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.691844] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 812.767921] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Successfully created port: 28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 813.008141] env[61905]: DEBUG oslo_concurrency.lockutils [None req-67d6eaf0-c754-42cb-9c58-70d9b22d3d7c tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "79537eaa-5abf-477b-bce6-c079c9beb964" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.364s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.063657] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059526} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.064713] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 813.066136] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a0fd2e-9829-4eb9-a300-4466acc9614a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.099182] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 60e68738-a333-44b2-a1e8-0b3da728059e/60e68738-a333-44b2-a1e8-0b3da728059e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 813.099437] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e47f7103-10a8-45a1-8d00-3db2732c3e7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.121163] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 813.121163] env[61905]: value = "task-1362362" [ 813.121163] env[61905]: _type = "Task" [ 813.121163] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.129615] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.165151] env[61905]: DEBUG nova.scheduler.client.report [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.510600] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 813.631484] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362362, 'name': ReconfigVM_Task, 'duration_secs': 0.319175} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.631931] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 60e68738-a333-44b2-a1e8-0b3da728059e/60e68738-a333-44b2-a1e8-0b3da728059e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.632562] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d7fc076-bae1-4367-aa90-a792ffa34e76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.639544] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 813.639544] env[61905]: value = "task-1362363" [ 813.639544] env[61905]: _type = "Task" [ 813.639544] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.647132] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362363, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.671196] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.671811] env[61905]: ERROR nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. 
[ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Traceback (most recent call last): [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.driver.spawn(context, instance, image_meta, [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] vm_ref = self.build_virtual_machine(instance, [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.671811] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] for vif in network_info: [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return self._sync_wrapper(fn, *args, **kwargs) [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.wait() [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self[:] = self._gt.wait() [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return self._exit_event.wait() [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] current.throw(*self._exc) [ 813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
813.672091] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] result = function(*args, **kwargs) [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] return func(*args, **kwargs) [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise e [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] nwinfo = self.network_api.allocate_for_instance( [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] created_port_ids = self._update_ports_for_instance( [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] with excutils.save_and_reraise_exception(): [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] self.force_reraise() [ 813.672415] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise self.value [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] updated_port = self._update_port( [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] _ensure_no_port_binding_failure(port) [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] raise exception.PortBindingFailed(port_id=port['id']) [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] nova.exception.PortBindingFailed: Binding failed for 
port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. [ 813.672883] env[61905]: ERROR nova.compute.manager [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] [ 813.673133] env[61905]: DEBUG nova.compute.utils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 813.674299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.426s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.677413] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Build of instance 3ad9206a-5562-43a6-87a4-869f93b10933 was re-scheduled: Binding failed for port 589a607c-ddc7-44a8-8afc-ed70afde064f, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 813.680555] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 813.680555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquiring lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.680555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Acquired lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.680555] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.700984] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 813.816856] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:12:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 813.817129] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 813.817286] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.817469] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 813.817611] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.817754] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 813.818818] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 813.820904] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 813.821127] 
env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 813.821306] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 813.821483] env[61905]: DEBUG nova.virt.hardware [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 813.822375] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a7a4c0-640d-4ee9-a703-9cb218a0aad4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.831422] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f1d3f8-f769-4fce-ad93-ebc60b09507c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.037713] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.150889] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362363, 'name': Rename_Task, 'duration_secs': 0.139411} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.151257] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.151570] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab1557f2-cdf1-4c3f-8f26-30477011bd96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.158828] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 814.158828] env[61905]: value = "task-1362364" [ 814.158828] env[61905]: _type = "Task" [ 814.158828] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.169377] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362364, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.214172] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.363703] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.439011] env[61905]: DEBUG nova.compute.manager [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Received event network-vif-plugged-28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.439236] env[61905]: DEBUG oslo_concurrency.lockutils [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] Acquiring lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.439435] env[61905]: DEBUG oslo_concurrency.lockutils [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.440069] env[61905]: DEBUG oslo_concurrency.lockutils [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.440069] env[61905]: DEBUG nova.compute.manager [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] No waiting events found dispatching network-vif-plugged-28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 814.440069] env[61905]: WARNING nova.compute.manager [req-67847412-92de-49c6-b202-fc8218f92532 req-c64312b8-3d8d-4d6c-b828-06c03326b16e service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Received unexpected event network-vif-plugged-28c0803e-8de3-4d23-95e4-f7292cacc719 for instance with vm_state building and task_state spawning. 
[ 814.497778] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf23da1a-063c-40f2-b9a5-116d5038efa2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.506339] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b433f0-312d-4e17-bf87-593112544672 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.539631] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Successfully updated port: 28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 814.544309] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918a2954-8b13-4a33-b606-678aa8c5f1a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.550892] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f42f61-922a-4bf3-a167-8bef940d839f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.569205] env[61905]: DEBUG nova.compute.provider_tree [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.670737] env[61905]: DEBUG oslo_vmware.api [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362364, 'name': PowerOnVM_Task, 'duration_secs': 0.4432} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.670999] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.671306] env[61905]: INFO nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Took 7.09 seconds to spawn the instance on the hypervisor. 
[ 814.671479] env[61905]: DEBUG nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 814.672288] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89369e1d-2af7-40ab-bbe1-1135c68cc953 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.870620] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Releasing lock "refresh_cache-3ad9206a-5562-43a6-87a4-869f93b10933" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.870620] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 814.870620] env[61905]: DEBUG nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 814.870620] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.883989] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.044958] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.044958] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.044958] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.072571] env[61905]: DEBUG nova.scheduler.client.report [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.201149] env[61905]: INFO nova.compute.manager [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Took 37.92 seconds to build instance. [ 815.387906] env[61905]: DEBUG nova.network.neutron [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.578295] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.903s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.578295] env[61905]: ERROR nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. 
[ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Traceback (most recent call last): [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.driver.spawn(context, instance, image_meta, [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 815.578295] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] vm_ref = self.build_virtual_machine(instance, [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] vif_infos = vmwarevif.get_vif_info(self._session, [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] for vif in network_info: [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self._sync_wrapper(fn, *args, **kwargs) [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.wait() [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self[:] = self._gt.wait() [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self._exit_event.wait() [ 815.578587] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] result = hub.switch() [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return self.greenlet.switch() [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] result = function(*args, **kwargs) [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] return func(*args, **kwargs) [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise e [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] nwinfo = self.network_api.allocate_for_instance( [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 815.578892] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] created_port_ids = self._update_ports_for_instance( [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] with excutils.save_and_reraise_exception(): [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] self.force_reraise() [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise self.value [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] updated_port = self._update_port( [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] _ensure_no_port_binding_failure(port) [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 815.579238] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] raise exception.PortBindingFailed(port_id=port['id']) [ 815.579515] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] nova.exception.PortBindingFailed: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. [ 815.579515] env[61905]: ERROR nova.compute.manager [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] [ 815.579515] env[61905]: DEBUG nova.compute.utils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 815.580167] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.826s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.586727] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Build of instance 0bed6b1c-237b-469d-9f9b-0c4c84550ffb was re-scheduled: Binding failed for port de5272fe-50f3-40b4-8832-c646001368da, please check neutron logs for more information. 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 815.587245] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 815.591276] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.591437] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.591600] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.631843] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.705280] env[61905]: DEBUG oslo_concurrency.lockutils [None req-553011dd-7224-49e1-ba54-0b2a38833deb tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.053s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.869390] env[61905]: DEBUG nova.network.neutron [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Updating instance_info_cache with network_info: [{"id": "28c0803e-8de3-4d23-95e4-f7292cacc719", "address": "fa:16:3e:b7:50:9e", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c0803e-8d", "ovs_interfaceid": "28c0803e-8de3-4d23-95e4-f7292cacc719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.890025] env[61905]: INFO nova.compute.manager [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] [instance: 3ad9206a-5562-43a6-87a4-869f93b10933] Took 1.02 seconds to deallocate network for instance. [ 816.130485] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.164124] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.164585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.208964] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 816.255614] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.373091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.373403] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Instance network_info: |[{"id": "28c0803e-8de3-4d23-95e4-f7292cacc719", "address": "fa:16:3e:b7:50:9e", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c0803e-8d", "ovs_interfaceid": "28c0803e-8de3-4d23-95e4-f7292cacc719", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 816.373987] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:50:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62d6a386-ffdb-4232-83f3-cb21c5e59e85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28c0803e-8de3-4d23-95e4-f7292cacc719', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.381588] env[61905]: DEBUG oslo.service.loopingcall [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 816.381815] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 816.382048] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4213a3f4-cabd-4056-a2c2-b2892c0eb793 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.408499] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 816.408499] env[61905]: value = "task-1362365" [ 816.408499] env[61905]: _type = "Task" [ 816.408499] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.416925] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362365, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.448796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb77dd2e-abbf-476a-a74c-569e7285a377 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.456038] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce53f81-748f-4bb0-80e2-e3662f4f2f56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.487861] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bcba55-6657-4cd2-a365-193ad03be51b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.491406] env[61905]: DEBUG nova.compute.manager [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Received event network-changed-28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.491596] env[61905]: DEBUG nova.compute.manager [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Refreshing instance network info cache due to event network-changed-28c0803e-8de3-4d23-95e4-f7292cacc719. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 816.491807] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] Acquiring lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.491946] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] Acquired lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.492115] env[61905]: DEBUG nova.network.neutron [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Refreshing network info cache for port 28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 816.499062] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e90102-f6f7-4396-a07c-59da0abbafa3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.515201] env[61905]: DEBUG nova.compute.provider_tree [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.728234] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.758962] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-0bed6b1c-237b-469d-9f9b-0c4c84550ffb" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.759288] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 816.759488] env[61905]: DEBUG nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 816.759652] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 816.775994] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.919878] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362365, 'name': CreateVM_Task, 'duration_secs': 0.306199} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.919878] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 816.920373] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.920576] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.920891] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 816.921203] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75d924a9-93bb-45ae-bf75-8581dbb43337 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.925561] env[61905]: INFO nova.scheduler.client.report [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Deleted allocations for instance 3ad9206a-5562-43a6-87a4-869f93b10933 [ 816.935064] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 816.935064] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cfe1f2-231e-cfaf-89e5-5000c1a52d7c" [ 816.935064] env[61905]: _type = "Task" [ 816.935064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.947555] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cfe1f2-231e-cfaf-89e5-5000c1a52d7c, 'name': SearchDatastore_Task, 'duration_secs': 0.011074} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.948016] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.948359] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.948803] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.949449] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.949449] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.950344] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28bdb612-48ca-4deb-9f9a-59b9b49d780a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.958856] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.959105] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.960078] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98c6eba9-a41b-4cc4-b7f5-540e8c71ec32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.966414] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 816.966414] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52355ace-7726-3c82-ca66-93133a75fee8" [ 816.966414] env[61905]: _type = "Task" [ 816.966414] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.974856] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52355ace-7726-3c82-ca66-93133a75fee8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.022730] env[61905]: DEBUG nova.scheduler.client.report [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.279817] env[61905]: DEBUG nova.network.neutron [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.282125] env[61905]: DEBUG nova.network.neutron [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Updated VIF entry in instance network info cache for port 28c0803e-8de3-4d23-95e4-f7292cacc719. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 817.282495] env[61905]: DEBUG nova.network.neutron [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Updating instance_info_cache with network_info: [{"id": "28c0803e-8de3-4d23-95e4-f7292cacc719", "address": "fa:16:3e:b7:50:9e", "network": {"id": "6a998fd5-11f3-48f2-8147-5ecd7ec93243", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-188776877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "087dc5ebfc47453eb42a96a28550b39d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62d6a386-ffdb-4232-83f3-cb21c5e59e85", "external-id": "nsx-vlan-transportzone-950", "segmentation_id": 950, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28c0803e-8d", "ovs_interfaceid": "28c0803e-8de3-4d23-95e4-f7292cacc719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.438279] env[61905]: DEBUG oslo_concurrency.lockutils [None req-83010c2d-d91e-43a7-bf30-5175204bfd98 tempest-ServersNegativeTestJSON-1105083462 tempest-ServersNegativeTestJSON-1105083462-project-member] Lock "3ad9206a-5562-43a6-87a4-869f93b10933" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 152.383s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.494022] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52355ace-7726-3c82-ca66-93133a75fee8, 'name': SearchDatastore_Task, 'duration_secs': 0.008477} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.494022] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af9feaa0-e026-42c0-931c-7e3272a37810 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.503939] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "299479fb-9a94-40b8-928d-8e491dbe1af1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.504675] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.505278] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 817.505278] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eeea4b-351f-57d4-a24c-870a372dc084" [ 817.505278] env[61905]: _type = "Task" [ 817.505278] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.516879] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eeea4b-351f-57d4-a24c-870a372dc084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.531915] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.949s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.531915] env[61905]: ERROR nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. 
[ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Traceback (most recent call last): [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.driver.spawn(context, instance, image_meta, [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 546, in spawn [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 817.531915] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] vm_ref = self.build_virtual_machine(instance, [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] vif_infos = vmwarevif.get_vif_info(self._session, [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] for vif in network_info: [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self._sync_wrapper(fn, *args, **kwargs) [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.wait() [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self[:] = self._gt.wait() [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self._exit_event.wait() [ 817.532337] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] result = hub.switch() [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return self.greenlet.switch() [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] result = function(*args, **kwargs) [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] return func(*args, **kwargs) [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise e [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] nwinfo = self.network_api.allocate_for_instance( [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 817.532708] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] created_port_ids = self._update_ports_for_instance( [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] with excutils.save_and_reraise_exception(): [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] self.force_reraise() [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise self.value [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] updated_port = self._update_port( [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] _ensure_no_port_binding_failure(port) [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 817.533088] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] raise exception.PortBindingFailed(port_id=port['id']) [ 817.533457] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] nova.exception.PortBindingFailed: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. [ 817.533457] env[61905]: ERROR nova.compute.manager [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] [ 817.533457] env[61905]: DEBUG nova.compute.utils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. {{(pid=61905) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 817.537404] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Build of instance 8ef98f37-9059-4658-9679-fb50dc812eb5 was re-scheduled: Binding failed for port 097dded6-754c-4031-8dd4-acb71be06d83, please check neutron logs for more information. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 817.537404] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Unplugging VIFs for instance {{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 817.537404] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.539487] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquired lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.539487] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.539840] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.663s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.541719] env[61905]: INFO nova.compute.claims [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f 
tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.785347] env[61905]: INFO nova.compute.manager [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0bed6b1c-237b-469d-9f9b-0c4c84550ffb] Took 1.03 seconds to deallocate network for instance. [ 817.788524] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6a52a15-5eaf-4aa2-b2ed-861de04b183b req-2c0a47b8-7fab-4d71-975b-b004462d3b47 service nova] Releasing lock "refresh_cache-a4a03b8a-3206-4684-9d85-0e60ac643175" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.941139] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 818.016791] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eeea4b-351f-57d4-a24c-870a372dc084, 'name': SearchDatastore_Task, 'duration_secs': 0.014258} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.017133] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.017422] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] a4a03b8a-3206-4684-9d85-0e60ac643175/a4a03b8a-3206-4684-9d85-0e60ac643175.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 818.017713] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ace956d4-7d34-4de7-8640-e5373561016d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.025998] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 818.025998] env[61905]: value = "task-1362366" [ 818.025998] env[61905]: _type = "Task" [ 818.025998] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.035260] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362366, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.064803] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.152192] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.473684] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.539105] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362366, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.663086] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Releasing lock "refresh_cache-8ef98f37-9059-4658-9679-fb50dc812eb5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.663086] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61905) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 818.663086] env[61905]: DEBUG nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 818.663086] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.700048] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.842040] env[61905]: INFO nova.scheduler.client.report [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted allocations for instance 0bed6b1c-237b-469d-9f9b-0c4c84550ffb [ 819.014921] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae65e01-57c9-42a4-b7dd-59b471e5a4ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.023715] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087a9b25-6891-4cf4-9ecf-f2253055646a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.058497] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2126b81-23d1-42f9-8ec5-fd7e5b94afc3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.064435] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362366, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.862445} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.065077] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] a4a03b8a-3206-4684-9d85-0e60ac643175/a4a03b8a-3206-4684-9d85-0e60ac643175.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.065298] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.065577] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6c2e70a-957f-4a19-afe9-334e95c0779d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.071271] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6c06c2-2b5e-4d67-ba29-0f30959d6450 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.076157] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 819.076157] env[61905]: value = "task-1362367" [ 819.076157] env[61905]: _type = "Task" [ 819.076157] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.088481] env[61905]: DEBUG nova.compute.provider_tree [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.094651] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362367, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.205837] env[61905]: DEBUG nova.network.neutron [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.354672] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2edba27-07d7-4135-afd8-70daba764071 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0bed6b1c-237b-469d-9f9b-0c4c84550ffb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 150.443s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.586420] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362367, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202716} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.587309] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.587549] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e3d9e0-cc41-43f7-be91-bec363d4db6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.592243] env[61905]: DEBUG nova.scheduler.client.report [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.619355] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] a4a03b8a-3206-4684-9d85-0e60ac643175/a4a03b8a-3206-4684-9d85-0e60ac643175.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.620481] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2826a90-329e-444a-8023-ec47f86c3766 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.640847] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 819.640847] env[61905]: value = "task-1362368" [ 819.640847] env[61905]: _type = "Task" [ 819.640847] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.649375] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362368, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.708660] env[61905]: INFO nova.compute.manager [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 8ef98f37-9059-4658-9679-fb50dc812eb5] Took 1.05 seconds to deallocate network for instance. [ 819.858042] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 820.121099] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.121676] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 820.124220] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.321s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.128677] env[61905]: INFO nova.compute.claims [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.151682] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362368, 'name': ReconfigVM_Task, 'duration_secs': 0.271448} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.152240] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Reconfigured VM instance instance-0000003a to attach disk [datastore2] a4a03b8a-3206-4684-9d85-0e60ac643175/a4a03b8a-3206-4684-9d85-0e60ac643175.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.152621] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02190a21-102e-4d00-9299-1a954034fe81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.159620] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 820.159620] env[61905]: value = "task-1362369" [ 820.159620] env[61905]: _type = "Task" [ 820.159620] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.168850] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362369, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.379158] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.630101] env[61905]: DEBUG nova.compute.utils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.633427] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 820.633599] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 820.669985] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362369, 'name': Rename_Task, 'duration_secs': 0.147447} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.670566] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.670674] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1697e75d-2c86-42c0-9b70-e70dc67c749a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.678645] env[61905]: DEBUG nova.policy [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bacb2f35d1e43e08adf2ca42149e6a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8e003b9359346418cead91f86082c4b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 820.680196] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 820.680196] env[61905]: value = "task-1362370" [ 820.680196] env[61905]: _type = "Task" [ 820.680196] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.688763] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362370, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.756743] env[61905]: INFO nova.scheduler.client.report [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleted allocations for instance 8ef98f37-9059-4658-9679-fb50dc812eb5 [ 821.135113] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 821.196026] env[61905]: DEBUG oslo_vmware.api [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362370, 'name': PowerOnVM_Task, 'duration_secs': 0.491124} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.201286] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.201511] env[61905]: INFO nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Took 7.50 seconds to spawn the instance on the hypervisor. [ 821.201686] env[61905]: DEBUG nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 821.202955] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c37ca2-74b5-40c0-adb4-3763fcade272 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.265647] env[61905]: DEBUG oslo_concurrency.lockutils [None req-438f5dd8-24a7-4ebe-bd8c-a6bb40d73fa9 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "8ef98f37-9059-4658-9679-fb50dc812eb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 148.184s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.277054] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Successfully created port: 628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.475346] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae472fd-70cd-45cf-bd84-e2c738cde920 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.485173] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb430217-9482-4bc6-ad6d-19ea68a0777a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.516976] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be57ff8-bb16-4846-8b56-ac398bde7276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.524974] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b17388-e52e-4264-b020-aa9a3d272ed3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.538929] env[61905]: DEBUG nova.compute.provider_tree [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 
tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.614330] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Successfully created port: ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.722287] env[61905]: INFO nova.compute.manager [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Took 37.33 seconds to build instance. [ 821.770674] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 821.988928] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.989231] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.042062] env[61905]: DEBUG nova.scheduler.client.report [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 822.147378] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 822.173665] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.173937] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.174114] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.174304] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.174446] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.174704] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.174909] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.175078] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.175244] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f 
tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.175402] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.175566] env[61905]: DEBUG nova.virt.hardware [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.176432] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b111e712-0089-4284-87ec-6cfaf30ed847 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.184591] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01056af0-3fe5-4a92-854b-418b4a826ad1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.224216] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d77b4463-f763-40ab-96c3-70eb5e0db3b0 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.077s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.293893] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.547806] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.549729] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 822.552538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.622s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.552727] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.552878] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 822.557033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.618s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.557411] env[61905]: INFO nova.compute.claims [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.560498] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c5e836-dcc3-42f6-825c-a6b740da35b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.568852] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ef0520-8f97-4087-9d49-182685039c8e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.582748] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba93657-a94c-4b58-b306-81a1396b1f71 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.589671] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0313d3f8-117d-44c3-85db-0551fbc7e536 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.620049] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181481MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 822.620222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.726779] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.061813] env[61905]: DEBUG nova.compute.utils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.063146] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 823.063322] env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.083820] env[61905]: DEBUG nova.compute.manager [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-vif-plugged-628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 823.084042] env[61905]: DEBUG oslo_concurrency.lockutils [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] Acquiring lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.084259] env[61905]: DEBUG oslo_concurrency.lockutils [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.084420] env[61905]: DEBUG oslo_concurrency.lockutils [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.084593] env[61905]: DEBUG nova.compute.manager [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] No waiting events found dispatching network-vif-plugged-628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 823.084735] 
env[61905]: WARNING nova.compute.manager [req-175803b9-0ff5-4c97-ac45-a35c25971793 req-2e1a0e07-b800-4036-98a0-cb68ca0a12e4 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received unexpected event network-vif-plugged-628da9f1-2769-4d1e-8dd4-afd93b50bb24 for instance with vm_state building and task_state spawning. [ 823.136642] env[61905]: DEBUG nova.policy [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c96c7fd58c04379a6196d428ffcdffd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e36aced58abc41f0b5eba97157ffee2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 823.248594] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.266576] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Successfully updated port: 628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.465935] env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Successfully created port: 28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.566229] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 823.875785] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf02028-2fda-43e6-87c3-6391a53b8b77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.884736] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06307889-6349-4afc-99b8-8bebc6ce5436 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.917616] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759c86e3-c0cc-4ce1-9329-ba006ce51278 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.927090] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e226a826-6d89-43ba-b5fe-4e20c3a7b40c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.944165] env[61905]: DEBUG nova.compute.provider_tree [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.043655] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "a4a03b8a-3206-4684-9d85-0e60ac643175" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.043833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.044051] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.044240] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.044414] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b 
tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.047140] env[61905]: INFO nova.compute.manager [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Terminating instance [ 824.049304] env[61905]: DEBUG nova.compute.manager [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 824.050040] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.050622] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c46cb7-b1d7-4645-bc2e-f42d5fa7c97a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.058519] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.059178] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ef84f68-fed9-4c54-8ea3-c65e8b81a4a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.065054] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 824.065054] env[61905]: value = "task-1362371" [ 824.065054] env[61905]: _type = "Task" [ 824.065054] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.076150] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362371, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.448781] env[61905]: DEBUG nova.scheduler.client.report [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 824.575495] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362371, 'name': PowerOffVM_Task, 'duration_secs': 0.218397} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.575799] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.575971] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.576231] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ef55671-c518-4166-8f8e-c0410d54eba2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.578456] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 824.602791] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 824.603038] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 824.603198] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.603379] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 824.603529] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.603713] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 824.603923] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 824.604097] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 824.604265] env[61905]: DEBUG 
nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 824.604421] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 824.604585] env[61905]: DEBUG nova.virt.hardware [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 824.605421] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f63a0c-2ad9-462c-a89c-ee14ec34a782 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.612645] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5ab03c-093c-4f96-a6b6-4739a76db79b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.645862] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.645995] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.646164] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleting the datastore file [datastore2] a4a03b8a-3206-4684-9d85-0e60ac643175 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.646431] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21b30105-3d34-441c-bee5-0a665d219864 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.652201] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 824.652201] env[61905]: value = "task-1362373" [ 824.652201] env[61905]: _type = "Task" [ 824.652201] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.660080] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.954220] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.954771] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 824.961022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.746s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.961022] env[61905]: INFO nova.compute.claims [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.111168] env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Successfully updated port: 28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.124131] env[61905]: DEBUG nova.compute.manager [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-changed-628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 825.124131] env[61905]: DEBUG nova.compute.manager [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Refreshing instance network info cache due to event network-changed-628da9f1-2769-4d1e-8dd4-afd93b50bb24. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 825.124131] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Acquiring lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.124131] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Acquired lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.124131] env[61905]: DEBUG nova.network.neutron [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Refreshing network info cache for port 628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.161934] env[61905]: DEBUG oslo_vmware.api [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1521} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.162202] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.162385] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.162559] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.162730] env[61905]: INFO nova.compute.manager [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Took 1.11 seconds to destroy the instance on the hypervisor. [ 825.162962] env[61905]: DEBUG oslo.service.loopingcall [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.163171] env[61905]: DEBUG nova.compute.manager [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 825.163270] env[61905]: DEBUG nova.network.neutron [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 825.465310] env[61905]: DEBUG nova.compute.utils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 825.470018] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 825.470018] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.524436] env[61905]: DEBUG nova.policy [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a03d67845f5b4d3b8c06e3368fd8abe2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9db3c8210eec4249bb925664ef215701', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 825.535165] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Successfully updated port: ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.614095] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.614239] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.615023] 
env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.660568] env[61905]: DEBUG nova.network.neutron [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 825.745226] env[61905]: DEBUG nova.network.neutron [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.793939] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Successfully created port: 5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.971622] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 826.021792] env[61905]: DEBUG nova.network.neutron [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.037597] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.148094] env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.248111] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Releasing lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.248410] env[61905]: DEBUG nova.compute.manager [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Received event network-vif-plugged-28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 826.249307] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Acquiring lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.249307] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.249307] env[61905]: DEBUG oslo_concurrency.lockutils [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.249307] env[61905]: DEBUG nova.compute.manager [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] No waiting events found dispatching network-vif-plugged-28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 826.249307] env[61905]: WARNING nova.compute.manager [req-cb861ee8-9fc9-455c-aac5-35d7bf168d3d req-8fe1fa90-7374-4679-9f7f-aa43faa8181c service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Received unexpected event network-vif-plugged-28f77124-405d-4c8e-8ca6-3e2fc6d05139 for instance with vm_state building and task_state spawning. 
[ 826.249610] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.249748] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.287726] env[61905]: DEBUG nova.network.neutron [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Updating instance_info_cache with network_info: [{"id": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "address": "fa:16:3e:9f:25:2d", "network": {"id": "e72d41e5-46f9-42d8-b75c-b531cf8d8a15", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1523043429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e36aced58abc41f0b5eba97157ffee2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f77124-40", "ovs_interfaceid": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.296559] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26082348-b72d-4f8a-a0b8-2d57a72be8c1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.304689] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be8342f-8bbb-4a48-9043-590c66f44fee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.340439] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2743a946-63c2-417b-9da0-ba517c5fe6f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.354370] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ae6349-9773-4c56-a62b-fff2da10df5c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.365723] env[61905]: DEBUG nova.compute.provider_tree [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 
tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.525162] env[61905]: INFO nova.compute.manager [-] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Took 1.36 seconds to deallocate network for instance. [ 826.784839] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 826.791900] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Releasing lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.791900] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Instance network_info: |[{"id": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "address": "fa:16:3e:9f:25:2d", "network": {"id": "e72d41e5-46f9-42d8-b75c-b531cf8d8a15", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1523043429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e36aced58abc41f0b5eba97157ffee2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f77124-40", "ovs_interfaceid": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 826.792962] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:25:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28f77124-405d-4c8e-8ca6-3e2fc6d05139', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 826.800272] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 
tempest-VolumesAdminNegativeTest-363631365-project-member] Creating folder: Project (e36aced58abc41f0b5eba97157ffee2e). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 826.800565] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4c3dd8a-0dd6-4edc-9b6a-8d4160646758 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.812825] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Created folder: Project (e36aced58abc41f0b5eba97157ffee2e) in parent group-v289968. [ 826.813011] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Creating folder: Instances. Parent ref: group-v289997. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 826.813242] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84b191b9-25fb-476b-af95-69c12f11f0ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.821670] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Created folder: Instances in parent group-v289997. [ 826.821786] env[61905]: DEBUG oslo.service.loopingcall [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.821960] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 826.822177] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70ffeab8-d96c-40f6-891f-6de0bd42c0af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.842242] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 826.842242] env[61905]: value = "task-1362376" [ 826.842242] env[61905]: _type = "Task" [ 826.842242] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.849518] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362376, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.869956] env[61905]: DEBUG nova.scheduler.client.report [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 826.986242] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 827.014305] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.014578] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.014749] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.014942] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.015107] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Image 
pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.015269] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.015492] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.018907] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.019139] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.019345] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.019537] env[61905]: DEBUG nova.virt.hardware [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.020762] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dc3a26-7486-44e7-90bf-f1d3ef23d497 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.033226] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.034695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf3ba34-08b7-4f0a-9bd0-dc805e71c799 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.146928] env[61905]: DEBUG nova.network.neutron [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updating 
instance_info_cache with network_info: [{"id": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "address": "fa:16:3e:26:46:53", "network": {"id": "5627c279-6cac-4085-a9be-d6c356687ee9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315975732", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap628da9f1-27", "ovs_interfaceid": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "address": "fa:16:3e:70:57:37", "network": {"id": "20b08a68-3a05-46bc-bfac-b671fc1355df", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058009156", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccfc79bb-43", "ovs_interfaceid": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.192740] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Received event network-changed-28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.192939] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Refreshing instance network info cache due to event network-changed-28f77124-405d-4c8e-8ca6-3e2fc6d05139. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 827.193170] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Acquiring lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.193312] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Acquired lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.193466] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Refreshing network info cache for port 28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.360718] env[61905]: DEBUG nova.compute.manager [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Received event network-vif-plugged-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.361130] env[61905]: DEBUG oslo_concurrency.lockutils [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] Acquiring lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.361539] env[61905]: DEBUG oslo_concurrency.lockutils [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.361862] env[61905]: DEBUG oslo_concurrency.lockutils [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.362204] env[61905]: DEBUG nova.compute.manager [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] No waiting events found dispatching network-vif-plugged-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 827.362587] env[61905]: WARNING nova.compute.manager [req-179ba273-d06f-4962-a6bb-044bd32d581e req-9037e38d-4aea-462d-9961-79b32b2c024a service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Received unexpected event network-vif-plugged-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b for instance with vm_state building and task_state spawning. [ 827.363503] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362376, 'name': CreateVM_Task, 'duration_secs': 0.287206} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.363773] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 827.364678] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.364987] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.365487] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 827.366121] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7d3252-ef9a-4dba-acff-729e62dfe3dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.372201] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 827.372201] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526c47bb-28b6-cf14-67d5-9099f5ff59ff" [ 827.372201] env[61905]: _type = "Task" [ 827.372201] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.375580] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.417s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.376028] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 827.378641] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.341s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.382264] env[61905]: INFO nova.compute.claims [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.392032] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526c47bb-28b6-cf14-67d5-9099f5ff59ff, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.392297] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.392509] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 827.392725] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.393716] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.393716] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 827.393716] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2331aea2-eda7-4d4d-b752-2b930c535cb6 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.402859] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 827.402859] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 827.403498] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3763a55c-17af-42e6-9c97-cd0cb517ec66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.409407] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 827.409407] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ad0b97-08c2-a92c-2c94-2d439ab45168" [ 827.409407] env[61905]: _type = "Task" [ 827.409407] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.417247] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ad0b97-08c2-a92c-2c94-2d439ab45168, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.437660] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Successfully updated port: 5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.650639] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Releasing lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.651044] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance network_info: |[{"id": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "address": "fa:16:3e:26:46:53", "network": {"id": "5627c279-6cac-4085-a9be-d6c356687ee9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315975732", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap628da9f1-27", "ovs_interfaceid": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "address": "fa:16:3e:70:57:37", "network": {"id": "20b08a68-3a05-46bc-bfac-b671fc1355df", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058009156", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccfc79bb-43", "ovs_interfaceid": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 827.651498] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:46:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8abee039-d93e-48a7-8911-6416a3e1ff30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '628da9f1-2769-4d1e-8dd4-afd93b50bb24', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:57:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccfc79bb-4361-47ac-b5bd-62ac65252eee', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.660481] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Creating folder: Project (a8e003b9359346418cead91f86082c4b). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.660772] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0007d4bd-2153-4618-908f-36dce3c31c86 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.671094] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Created folder: Project (a8e003b9359346418cead91f86082c4b) in parent group-v289968. [ 827.671267] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Creating folder: Instances. Parent ref: group-v290000. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.671481] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c33ff14-8e5a-4553-8fe1-c94979785632 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.680011] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Created folder: Instances in parent group-v290000. [ 827.680235] env[61905]: DEBUG oslo.service.loopingcall [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.680414] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.680600] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f93f45a-95f0-4983-a53e-1df4421b4d06 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.703314] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.703314] env[61905]: value = "task-1362379" [ 827.703314] env[61905]: _type = "Task" [ 827.703314] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.710600] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362379, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.888031] env[61905]: DEBUG nova.compute.utils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.891595] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 827.891770] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.919064] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ad0b97-08c2-a92c-2c94-2d439ab45168, 'name': SearchDatastore_Task, 'duration_secs': 0.00871} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.921992] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1513410f-3ff7-4354-b62a-42dc1ebe19bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.927392] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 827.927392] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525dc226-2245-0064-b241-411b6a17871a" [ 827.927392] env[61905]: _type = "Task" [ 827.927392] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.932347] env[61905]: DEBUG nova.policy [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '067e276b21d94ec5a892d9cab4db71c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9c6510431f346018b853e88960e38a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 827.937580] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525dc226-2245-0064-b241-411b6a17871a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.940235] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.940446] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquired lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.940605] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.961207] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Updated VIF entry in instance network info cache for port 28f77124-405d-4c8e-8ca6-3e2fc6d05139. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.961571] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Updating instance_info_cache with network_info: [{"id": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "address": "fa:16:3e:9f:25:2d", "network": {"id": "e72d41e5-46f9-42d8-b75c-b531cf8d8a15", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1523043429-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e36aced58abc41f0b5eba97157ffee2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f77124-40", "ovs_interfaceid": "28f77124-405d-4c8e-8ca6-3e2fc6d05139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.217114] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362379, 'name': CreateVM_Task, 'duration_secs': 0.311803} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.217114] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.219259] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.219540] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.219988] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.220329] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a75fdf-84e3-4a15-82ee-2dc3a91d9c2c {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.226183] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 828.226183] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52317c6f-0f9f-f322-177c-a84380915910" [ 828.226183] env[61905]: _type = "Task" [ 828.226183] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.235783] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52317c6f-0f9f-f322-177c-a84380915910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.247931] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Successfully created port: cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.393020] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 828.445609] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525dc226-2245-0064-b241-411b6a17871a, 'name': SearchDatastore_Task, 'duration_secs': 0.0117} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.446132] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.446396] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] fed05097-de84-4617-bf9e-7fc116ebc56e/fed05097-de84-4617-bf9e-7fc116ebc56e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 828.446912] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58fba527-7ff4-4b3e-97c7-739096239f9a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.454764] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 828.454764] env[61905]: value = "task-1362380" [ 828.454764] env[61905]: _type = "Task" [ 828.454764] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.462947] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362380, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.465723] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Releasing lock "refresh_cache-fed05097-de84-4617-bf9e-7fc116ebc56e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.465974] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-vif-plugged-ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.466334] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Acquiring lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.466555] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.466725] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.466886] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] No waiting events found dispatching network-vif-plugged-ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 828.467066] env[61905]: WARNING nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received unexpected event network-vif-plugged-ccfc79bb-4361-47ac-b5bd-62ac65252eee for instance with vm_state building and task_state spawning. 
[ 828.467268] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Received event network-vif-deleted-28c0803e-8de3-4d23-95e4-f7292cacc719 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.467521] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-changed-ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 828.467704] env[61905]: DEBUG nova.compute.manager [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Refreshing instance network info cache due to event network-changed-ccfc79bb-4361-47ac-b5bd-62ac65252eee. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 828.467928] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Acquiring lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.468137] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Acquired lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.468393] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Refreshing network info cache for port ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.491309] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.632967] env[61905]: DEBUG nova.network.neutron [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Updating instance_info_cache with network_info: [{"id": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "address": "fa:16:3e:f7:40:9b", "network": {"id": "0bd44730-4a80-4892-af67-ec6158610633", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2025754455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9db3c8210eec4249bb925664ef215701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5087d36a-1d", "ovs_interfaceid": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.690745] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1013af-5e19-4bfe-bc99-05f943b723af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.698645] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749fa6fd-ea0e-491d-a751-0298b6b043ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.732579] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f553f0f-777a-400c-b2b7-912593b1ae1e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.741523] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52317c6f-0f9f-f322-177c-a84380915910, 'name': SearchDatastore_Task, 'duration_secs': 0.009019} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.743780] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.744059] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 828.744305] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.744450] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.744622] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 828.744932] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6577eff1-b1c1-469b-8196-2a8af04a93dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.747785] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770060f9-f116-48e1-8f7c-e1d9d107b76f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.766016] env[61905]: DEBUG nova.compute.provider_tree [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.784925] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.785123] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 
tempest-ServersTestMultiNic-2015078016-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 828.786190] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f04958ce-b054-441f-9008-416038bc21c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.791726] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 828.791726] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526988a1-656f-8359-2774-c662f1f536cd" [ 828.791726] env[61905]: _type = "Task" [ 828.791726] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.803151] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526988a1-656f-8359-2774-c662f1f536cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.964842] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362380, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502458} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.965298] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] fed05097-de84-4617-bf9e-7fc116ebc56e/fed05097-de84-4617-bf9e-7fc116ebc56e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.965667] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.966019] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1fdf8d9-c149-4708-8542-a29c46c3ffb0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.973559] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 828.973559] env[61905]: value = "task-1362381" [ 828.973559] env[61905]: _type = "Task" [ 828.973559] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.981937] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.136053] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Releasing lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.136385] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Instance network_info: |[{"id": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "address": "fa:16:3e:f7:40:9b", "network": {"id": "0bd44730-4a80-4892-af67-ec6158610633", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2025754455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9db3c8210eec4249bb925664ef215701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5087d36a-1d", "ovs_interfaceid": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 829.136804] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:40:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.144314] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Creating folder: Project (9db3c8210eec4249bb925664ef215701). Parent ref: group-v289968. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.146800] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88b8690c-019a-4352-bc75-808d2284803c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.158186] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Created folder: Project (9db3c8210eec4249bb925664ef215701) in parent group-v289968. [ 829.158376] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Creating folder: Instances. Parent ref: group-v290003. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.158627] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd88e0fa-c6c6-40ff-a3ca-e70cdc3b2c5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.167372] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Created folder: Instances in parent group-v290003. [ 829.167656] env[61905]: DEBUG oslo.service.loopingcall [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.167821] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.168010] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e311402-e1b4-4a00-9f5b-889b424ed278 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.184507] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updated VIF entry in instance network info cache for port ccfc79bb-4361-47ac-b5bd-62ac65252eee. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.184898] env[61905]: DEBUG nova.network.neutron [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updating instance_info_cache with network_info: [{"id": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "address": "fa:16:3e:26:46:53", "network": {"id": "5627c279-6cac-4085-a9be-d6c356687ee9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315975732", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap628da9f1-27", "ovs_interfaceid": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "address": "fa:16:3e:70:57:37", "network": {"id": "20b08a68-3a05-46bc-bfac-b671fc1355df", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1058009156", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccfc79bb-43", "ovs_interfaceid": "ccfc79bb-4361-47ac-b5bd-62ac65252eee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.191900] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.191900] env[61905]: value = "task-1362384" [ 829.191900] env[61905]: _type = "Task" [ 829.191900] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.200188] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362384, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.268501] env[61905]: DEBUG nova.scheduler.client.report [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.301781] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526988a1-656f-8359-2774-c662f1f536cd, 'name': SearchDatastore_Task, 'duration_secs': 0.051273} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.302589] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6da47a9d-0277-4c25-abfa-5c0d690be066 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.307785] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 829.307785] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a415c3-eb05-8e1f-9857-56eae31ed887" [ 829.307785] env[61905]: _type = "Task" [ 829.307785] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.315179] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a415c3-eb05-8e1f-9857-56eae31ed887, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.405680] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Start spawning the instance on the hypervisor. 
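Editor's note: the inventory report above shows the capacity math behind the scheduler's report for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7. Usable capacity for a resource class is (total - reserved) * allocation_ratio. A minimal sketch of that arithmetic using the logged values (the helper is illustrative, not Nova code):

    # Sketch of the capacity math implied by the inventory entry above; not Nova's code.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable(rc):
        """Schedulable capacity: (total - reserved) * allocation_ratio."""
        spec = inventory[rc]
        return (spec['total'] - spec['reserved']) * spec['allocation_ratio']

    print(usable('VCPU'))       # 192.0 -> 48 cores oversubscribed 4x
    print(usable('MEMORY_MB'))  # 196078.0
    print(usable('DISK_GB'))    # 400.0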
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 829.433203] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.433203] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.433203] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.433203] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.433439] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.433439] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.433627] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.433823] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 829.434059] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.434254] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.434478] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.435356] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a75475-c5cc-43be-9302-cc39dadd243f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.443680] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ff6cba-1887-4eb1-a82f-833fdd0f703a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.483290] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066685} completed successfully. 
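Editor's note: the nova.virt.hardware entries above walk the 1-vCPU m1.nano flavor through CPU topology selection. With no flavor or image limits (0:0:0 everywhere), any factorization of the vCPU count into sockets x cores x threads under the 65536 caps is a candidate, and for one vCPU the only candidate is (1, 1, 1), exactly what the log reports. A simplified, hypothetical re-creation of that enumeration, not the actual _get_possible_cpu_topologies implementation:

    # Simplified sketch of vCPU topology enumeration; an assumption-laden model,
    # not Nova's real code.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log above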
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.483573] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 829.484382] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fad3ab-a27f-473a-ba80-fe0ed749a89f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.507180] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] fed05097-de84-4617-bf9e-7fc116ebc56e/fed05097-de84-4617-bf9e-7fc116ebc56e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 829.507505] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46e9182b-9df2-4d5c-aa88-53801cfa3edd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.527035] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 829.527035] env[61905]: value = "task-1362385" [ 829.527035] env[61905]: _type = "Task" [ 829.527035] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.540819] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362385, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.629696] env[61905]: DEBUG nova.compute.manager [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Received event network-changed-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.629763] env[61905]: DEBUG nova.compute.manager [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Refreshing instance network info cache due to event network-changed-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 829.629943] env[61905]: DEBUG oslo_concurrency.lockutils [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] Acquiring lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.630068] env[61905]: DEBUG oslo_concurrency.lockutils [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] Acquired lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.630229] env[61905]: DEBUG nova.network.neutron [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Refreshing network info cache for port 5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.687642] env[61905]: DEBUG oslo_concurrency.lockutils [req-deda21b9-3252-47c1-bc0e-6a24f96ca851 req-09a2efc4-8453-4fde-bf9e-a6be0d2a6519 service nova] Releasing lock "refresh_cache-d31570f0-7662-4e13-9dee-51dc66728acc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.702567] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362384, 'name': CreateVM_Task, 'duration_secs': 0.322266} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.702731] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.703419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.703583] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.704308] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.704308] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11701053-f994-4e65-b498-5f809ae31d69 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.708788] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 
tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 829.708788] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528ab3f7-223d-de7e-fde2-fa310869bd74" [ 829.708788] env[61905]: _type = "Task" [ 829.708788] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.716019] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528ab3f7-223d-de7e-fde2-fa310869bd74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.774070] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.775124] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 829.777690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.050s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.781043] env[61905]: INFO nova.compute.claims [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.819893] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a415c3-eb05-8e1f-9857-56eae31ed887, 'name': SearchDatastore_Task, 'duration_secs': 0.00974} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.821057] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.821289] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] d31570f0-7662-4e13-9dee-51dc66728acc/d31570f0-7662-4e13-9dee-51dc66728acc.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 829.822057] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8bb1b0e-add9-495c-bc86-eb7d04781584 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.828453] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 829.828453] env[61905]: value = "task-1362386" [ 829.828453] env[61905]: _type = "Task" [ 829.828453] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.835829] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362386, 'name': CopyVirtualDisk_Task} progress is 0%. 
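Editor's note: every "Waiting for the task: (returnval){...}" / "progress is N%" / "completed successfully" triple in this log is oslo.vmware's task polling reporting on a vCenter task. A generic, hedged sketch of the loop those entries imply (the real implementation is oslo_vmware.api.VMwareAPISession.wait_for_task and uses an oslo.service looping call rather than a bare while loop; the callable here is a placeholder):

    # Generic polling sketch mirroring the Waiting/progress/completed entries above.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere task until it reaches a terminal state."""
        while True:
            info = get_task_info()              # caller supplies the vCenter lookup
            if info['state'] == 'success':      # logged as "completed successfully"
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error'))
            time.sleep(poll_interval)           # logged as "progress is N%"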
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.898208] env[61905]: DEBUG nova.compute.manager [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Received event network-vif-plugged-cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 829.898438] env[61905]: DEBUG oslo_concurrency.lockutils [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] Acquiring lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.898643] env[61905]: DEBUG oslo_concurrency.lockutils [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.898808] env[61905]: DEBUG oslo_concurrency.lockutils [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.898969] env[61905]: DEBUG nova.compute.manager [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] No waiting events found dispatching network-vif-plugged-cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 829.899148] env[61905]: WARNING nova.compute.manager [req-0ee8764f-b0c9-4a30-849c-27799e290131 req-541cd307-9c29-418e-b56f-8fd98a14955d service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Received unexpected event network-vif-plugged-cebb7714-2d86-4f26-804f-fb6614734c71 for instance with vm_state building and task_state spawning. [ 830.006357] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Successfully updated port: cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.037768] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362385, 'name': ReconfigVM_Task, 'duration_secs': 0.263127} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.038257] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Reconfigured VM instance instance-0000003c to attach disk [datastore1] fed05097-de84-4617-bf9e-7fc116ebc56e/fed05097-de84-4617-bf9e-7fc116ebc56e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 830.038724] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-736bf740-37d2-402b-90bf-4c4d8351eef6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.044337] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 830.044337] env[61905]: value = "task-1362387" [ 830.044337] env[61905]: _type = "Task" [ 830.044337] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.053133] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362387, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.221397] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528ab3f7-223d-de7e-fde2-fa310869bd74, 'name': SearchDatastore_Task, 'duration_secs': 0.018014} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.221742] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.222066] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.222229] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.222694] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.222694] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.222832] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4076fe65-23cc-4d60-ad2a-a15363e4e1eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.232106] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.232293] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Folder [datastore2] devstack-image-cache_base created. 
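Editor's note: the lock choreography above (acquire a lock named for the cached VMDK, search the datastore, create devstack-image-cache_base, then copy) is the fetch-if-missing image cache: the per-image lock ensures only one build populates the cache entry while concurrent builds of the same image wait, and builds of different images proceed in parallel. A minimal sketch of that pattern with oslo.concurrency (the fetch/exists helpers are placeholders, not Nova's API):

    # Sketch of the per-image cache lock seen above; helpers are hypothetical.
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, datastore, fetch, exists):
        cache_path = f'[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk'
        # Same lock-name pattern as the log entries: one lock per cached image.
        with lockutils.lock(cache_path):
            if not exists(cache_path):
                fetch(image_id, cache_path)
        return cache_path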
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.235133] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65b6e949-4894-4da3-914d-7e02fe000045 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.240703] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 830.240703] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52aed308-d24f-f23f-84cb-2c16f8d0e8b1" [ 830.240703] env[61905]: _type = "Task" [ 830.240703] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.248684] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52aed308-d24f-f23f-84cb-2c16f8d0e8b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.284915] env[61905]: DEBUG nova.compute.utils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.288742] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.288742] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.333857] env[61905]: DEBUG nova.policy [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '067e276b21d94ec5a892d9cab4db71c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9c6510431f346018b853e88960e38a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.341177] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362386, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.407591] env[61905]: DEBUG nova.network.neutron [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Updated VIF entry in instance network info cache for port 5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.408015] env[61905]: DEBUG nova.network.neutron [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Updating instance_info_cache with network_info: [{"id": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "address": "fa:16:3e:f7:40:9b", "network": {"id": "0bd44730-4a80-4892-af67-ec6158610633", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2025754455-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9db3c8210eec4249bb925664ef215701", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5087d36a-1d", "ovs_interfaceid": "5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.510723] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.512556] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.513418] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.557028] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362387, 'name': Rename_Task, 'duration_secs': 0.134596} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.557350] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.557635] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-702fcfab-73c0-4bb5-9a85-b1f76a895424 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.563985] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 830.563985] env[61905]: value = "task-1362388" [ 830.563985] env[61905]: _type = "Task" [ 830.563985] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.573787] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362388, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.754498] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52aed308-d24f-f23f-84cb-2c16f8d0e8b1, 'name': SearchDatastore_Task, 'duration_secs': 0.018014} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.754498] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68c288e0-16f6-4d04-b2c3-c876f8d573fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.761102] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 830.761102] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52850a2c-5751-94e2-e6a3-dcb29837c357" [ 830.761102] env[61905]: _type = "Task" [ 830.761102] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.771017] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52850a2c-5751-94e2-e6a3-dcb29837c357, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.789260] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 830.825166] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Successfully created port: 6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.841316] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362386, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685985} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.841550] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] d31570f0-7662-4e13-9dee-51dc66728acc/d31570f0-7662-4e13-9dee-51dc66728acc.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.841889] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.844622] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2e14715-38ae-4fb5-b836-9c84cbf9e912 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.851832] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 830.851832] env[61905]: value = "task-1362389" [ 830.851832] env[61905]: _type = "Task" [ 830.851832] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.861083] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362389, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.910509] env[61905]: DEBUG oslo_concurrency.lockutils [req-0e4137ef-80ce-4ca9-9971-e021977a4c19 req-e37ad223-c144-4386-8202-f5c93ba0ee15 service nova] Releasing lock "refresh_cache-ba3a1e36-a9f8-4482-908e-9c949c6f42ec" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.076103] env[61905]: DEBUG oslo_vmware.api [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362388, 'name': PowerOnVM_Task, 'duration_secs': 0.458799} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.076103] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 831.076103] env[61905]: INFO nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Took 6.50 seconds to spawn the instance on the hypervisor. [ 831.076453] env[61905]: DEBUG nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 831.077347] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9767fb7-d2a1-47ed-a017-e2947b65e624 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.087305] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.118238] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00f89d1-cfc0-4652-a46a-9042a3553c24 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.128367] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827d63e5-f9d4-4a43-a651-f9572dbf2b20 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.159494] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec47a87c-f918-462d-8fdd-47ed3d1e8a5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.167065] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5df91e-2fed-43fa-99b0-52cff269831f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.181338] env[61905]: DEBUG nova.compute.provider_tree [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.254138] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Updating instance_info_cache with network_info: [{"id": "cebb7714-2d86-4f26-804f-fb6614734c71", "address": "fa:16:3e:16:f0:86", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcebb7714-2d", "ovs_interfaceid": "cebb7714-2d86-4f26-804f-fb6614734c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.268255] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52850a2c-5751-94e2-e6a3-dcb29837c357, 'name': SearchDatastore_Task, 'duration_secs': 0.008806} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.269082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.269388] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] ba3a1e36-a9f8-4482-908e-9c949c6f42ec/ba3a1e36-a9f8-4482-908e-9c949c6f42ec.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.269648] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a235af4f-b793-42a0-bdd1-200f6d4855f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.276884] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 831.276884] env[61905]: value = "task-1362391" [ 831.276884] env[61905]: _type = "Task" [ 831.276884] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.284358] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.361961] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362389, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064351} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.362257] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.363025] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a426ee9a-d69b-4189-a35b-735a4f81218c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.387896] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] d31570f0-7662-4e13-9dee-51dc66728acc/d31570f0-7662-4e13-9dee-51dc66728acc.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.388173] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7565719-0ba9-48b0-bc03-3906aefebf6c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.407183] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 831.407183] env[61905]: value = "task-1362392" [ 831.407183] env[61905]: _type = "Task" [ 831.407183] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.415169] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.599127] env[61905]: INFO nova.compute.manager [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Took 35.81 seconds to build instance. 
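An aside on mechanics: the recurring "Task: {'id': ..., 'name': ...} progress is N%" entries followed by "completed successfully" all come from oslo.vmware's task poller (wait_for_task/_poll_task in oslo_vmware/api.py, the source cited on each entry). A minimal sketch of the pattern, not the actual oslo.vmware code; the poll_task_info callable stands in for the PropertyCollector read the real poller performs:

    import time

    def wait_for_task(poll_task_info, interval=0.5):
        # poll_task_info: callable returning a TaskInfo-like object with
        # .key, .name, .state, .progress, .result and .error attributes.
        while True:
            info = poll_task_info()
            if info.state in ('queued', 'running'):
                # Source of the "Task: {...} progress is N%" DEBUG entries.
                print("Task %s (%s) progress is %s%%"
                      % (info.key, info.name, info.progress or 0))
                time.sleep(interval)
            elif info.state == 'success':
                # Source of the "... completed successfully" entries.
                return info.result
            else:
                raise RuntimeError(info.error)

The real poller is driven by an oslo.service looping call rather than an inline sleep, which is why each poll is logged from _poll_task rather than from the caller.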
[ 831.684535] env[61905]: DEBUG nova.scheduler.client.report [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 831.756310] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.756691] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Instance network_info: |[{"id": "cebb7714-2d86-4f26-804f-fb6614734c71", "address": "fa:16:3e:16:f0:86", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcebb7714-2d", "ovs_interfaceid": "cebb7714-2d86-4f26-804f-fb6614734c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 831.757136] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:f0:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cebb7714-2d86-4f26-804f-fb6614734c71', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.765231] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 
tempest-ListServersNegativeTestJSON-322834082-project-member] Creating folder: Project (a9c6510431f346018b853e88960e38a3). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.765536] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb771539-41e2-4808-8916-64f61f6c42db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.775015] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Created folder: Project (a9c6510431f346018b853e88960e38a3) in parent group-v289968. [ 831.775260] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Creating folder: Instances. Parent ref: group-v290006. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 831.775495] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-260901e1-2030-4f56-a2fa-7b0d45bc4242 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.786028] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494218} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.786569] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] ba3a1e36-a9f8-4482-908e-9c949c6f42ec/ba3a1e36-a9f8-4482-908e-9c949c6f42ec.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.786778] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.787028] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Created folder: Instances in parent group-v290006. [ 831.787232] env[61905]: DEBUG oslo.service.loopingcall [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.787410] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f50654b-4d60-4ffb-b474-ee1d54a2f0c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.789091] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.789318] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9846e9c9-b84e-4c77-90c7-04adc7fb4f04 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.804706] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 831.807916] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 831.807916] env[61905]: value = "task-1362395" [ 831.807916] env[61905]: _type = "Task" [ 831.807916] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.811994] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.811994] env[61905]: value = "task-1362396" [ 831.811994] env[61905]: _type = "Task" [ 831.811994] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.819285] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362395, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.823868] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362396, 'name': CreateVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.833265] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.833509] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.833681] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.833840] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.833981] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.834139] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.834345] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.834500] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 831.834663] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.834821] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.834989] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.836176] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6d63e9-89f2-4bd1-a4e9-71fc810952a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.845593] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8bf951-4357-40ba-99a1-b1c390958e53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.917245] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362392, 'name': ReconfigVM_Task, 'duration_secs': 0.328436} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.917563] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Reconfigured VM instance instance-0000003b to attach disk [datastore1] d31570f0-7662-4e13-9dee-51dc66728acc/d31570f0-7662-4e13-9dee-51dc66728acc.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.918415] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62531ecc-4e3a-4642-bbfd-e48747e9eb5f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.925306] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 831.925306] env[61905]: value = "task-1362397" [ 831.925306] env[61905]: _type = "Task" [ 831.925306] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.935247] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362397, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.955962] env[61905]: DEBUG nova.compute.manager [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Received event network-changed-cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 831.956224] env[61905]: DEBUG nova.compute.manager [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Refreshing instance network info cache due to event network-changed-cebb7714-2d86-4f26-804f-fb6614734c71. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 831.956448] env[61905]: DEBUG oslo_concurrency.lockutils [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] Acquiring lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.956566] env[61905]: DEBUG oslo_concurrency.lockutils [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] Acquired lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.956746] env[61905]: DEBUG nova.network.neutron [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Refreshing network info cache for port cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.101648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c4ebcb6-1009-42cb-bfcc-6384ddaf9b0c tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 144.379s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.190300] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.190825] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 832.193316] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.720s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.194676] env[61905]: INFO nova.compute.claims [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.318866] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064604} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.319533] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.320301] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51be99d4-8884-4017-9dd8-a58f2e9c1b67 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.325853] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362396, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.345310] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] ba3a1e36-a9f8-4482-908e-9c949c6f42ec/ba3a1e36-a9f8-4482-908e-9c949c6f42ec.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.345723] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fac521d-5cd6-4916-b369-8fe5c1743395 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.362416] env[61905]: DEBUG nova.compute.manager [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Received event network-vif-plugged-6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.362742] env[61905]: DEBUG oslo_concurrency.lockutils [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] Acquiring lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.362742] env[61905]: DEBUG oslo_concurrency.lockutils [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.362821] env[61905]: DEBUG oslo_concurrency.lockutils [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.362973] env[61905]: DEBUG nova.compute.manager [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] No waiting events found dispatching network-vif-plugged-6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 832.363149] env[61905]: WARNING nova.compute.manager [req-22805746-9a2d-41ad-a067-56e143725e8f req-96bce2ec-1705-4d26-9f08-88d432722c8c service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Received unexpected event network-vif-plugged-6d6e4a71-550f-4400-af0a-c8e5178259bf for instance with vm_state building and task_state spawning. 
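The network-vif-plugged sequence just above (event received, the "<uuid>-events" lock held for 0.000s, "No waiting events found dispatching ...", then the WARNING) is Nova's external-event machinery: before an operation that Neutron later confirms with an event, the compute manager registers a waiter keyed by (instance, event name); an event that arrives when no waiter is registered is dispatched nowhere and logged as unexpected, which is harmless here because the instance is still building. A simplified, self-contained sketch of that registry (the real one is InstanceEvents in nova/compute/manager.py; the method names and the bare threading.Event are illustrative):

    import threading

    class InstanceEvents(object):
        """Waiter registry keyed by (instance_uuid, event_name)."""

        def __init__(self):
            self._events = {}
            self._lock = threading.Lock()

        def prepare(self, uuid, name):
            # Register interest before e.g. plugging a VIF; the caller
            # later wait()s on the returned Event.
            with self._lock:
                ev = threading.Event()
                self._events[(uuid, name)] = ev
                return ev

        def pop(self, uuid, name):
            # Called when Neutron reports network-vif-plugged-<port>.
            with self._lock:
                return self._events.pop((uuid, name), None)

    def handle_external_event(registry, uuid, name):
        waiter = registry.pop(uuid, name)
        if waiter is None:
            # Mirrors the WARNING above: nobody was waiting for this event.
            print("Received unexpected event %s for instance %s" % (name, uuid))
        else:
            waiter.set()  # wake the thread blocked on the Event from prepare()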
[ 832.370271] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 832.370271] env[61905]: value = "task-1362398" [ 832.370271] env[61905]: _type = "Task" [ 832.370271] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.377450] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362398, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.434595] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362397, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.455628] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Successfully updated port: 6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.604596] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.677503] env[61905]: DEBUG nova.network.neutron [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Updated VIF entry in instance network info cache for port cebb7714-2d86-4f26-804f-fb6614734c71. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.677873] env[61905]: DEBUG nova.network.neutron [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Updating instance_info_cache with network_info: [{"id": "cebb7714-2d86-4f26-804f-fb6614734c71", "address": "fa:16:3e:16:f0:86", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcebb7714-2d", "ovs_interfaceid": "cebb7714-2d86-4f26-804f-fb6614734c71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.701135] env[61905]: DEBUG nova.compute.utils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.702468] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 832.702633] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.749831] env[61905]: DEBUG nova.policy [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '067e276b21d94ec5a892d9cab4db71c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9c6510431f346018b853e88960e38a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 832.823347] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362396, 'name': CreateVM_Task, 'duration_secs': 0.796758} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.823556] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.824176] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.824366] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.824684] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.824935] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29505d53-dbae-4e59-ba51-823b57e65aea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.830152] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 832.830152] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520c2bf3-7610-968f-58e9-c8775a249154" [ 
832.830152] env[61905]: _type = "Task" [ 832.830152] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.839628] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520c2bf3-7610-968f-58e9-c8775a249154, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.878434] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362398, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.935385] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362397, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.958698] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.963019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.963019] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.035351] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Successfully created port: f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.135546] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.181436] env[61905]: DEBUG oslo_concurrency.lockutils [req-c0ca0426-12a9-4252-a1d4-0262801b257e req-05947160-a37f-4094-8c71-4f8b4ec75667 service nova] Releasing lock "refresh_cache-aeb72a57-d319-479d-a1c7-3cebc6f73f09" 
{{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.203634] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 833.340771] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520c2bf3-7610-968f-58e9-c8775a249154, 'name': SearchDatastore_Task, 'duration_secs': 0.014494} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.343369] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.343622] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.343854] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.343996] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.344201] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.344622] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cee6e035-4a27-4c2d-90d6-30b23a6829ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.358711] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.358711] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.359848] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6afdb2d5-2139-44de-8e8c-8d7783c9b012 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.364632] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 833.364632] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b5bf05-0fef-874b-4c10-4c5ba93fba50" [ 833.364632] env[61905]: _type = "Task" [ 833.364632] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.377335] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b5bf05-0fef-874b-4c10-4c5ba93fba50, 'name': SearchDatastore_Task, 'duration_secs': 0.008707} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.378482] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb156a8c-7f22-445a-9c6d-750763e475b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.385904] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362398, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.389450] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 833.389450] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c52163-5883-3dbb-e5f6-a4a4de02b6e9" [ 833.389450] env[61905]: _type = "Task" [ 833.389450] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.396894] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c52163-5883-3dbb-e5f6-a4a4de02b6e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.437198] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362397, 'name': Rename_Task, 'duration_secs': 1.127166} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.437774] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.437774] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e1cfb15-3d2d-4789-b4c1-ea44d9162945 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.444368] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){ [ 833.444368] env[61905]: value = "task-1362399" [ 833.444368] env[61905]: _type = "Task" [ 833.444368] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.454900] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.492937] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.508884] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17f897f-09e9-4018-b42c-8f0ca88f1777 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.519288] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32be3547-1b26-4026-a9d5-a127fb4de8fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.559104] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eafef3d-3536-4e2a-ad5f-4c80bc8ec15e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.570483] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267599b5-17a4-40fa-9d54-79f8a61a93ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.586266] env[61905]: DEBUG nova.compute.provider_tree [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.668576] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Updating instance_info_cache with network_info: [{"id": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "address": "fa:16:3e:55:ab:ab", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d6e4a71-55", "ovs_interfaceid": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.881802] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362398, 'name': ReconfigVM_Task, 'duration_secs': 1.016122} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.882108] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Reconfigured VM instance instance-0000003d to attach disk [datastore2] ba3a1e36-a9f8-4482-908e-9c949c6f42ec/ba3a1e36-a9f8-4482-908e-9c949c6f42ec.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.882755] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d9478b5-cdda-4482-a84e-fae054b5aa36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.889936] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 833.889936] env[61905]: value = "task-1362400" [ 833.889936] env[61905]: _type = "Task" [ 833.889936] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.904802] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362400, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.905072] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c52163-5883-3dbb-e5f6-a4a4de02b6e9, 'name': SearchDatastore_Task, 'duration_secs': 0.008845} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.905327] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.905583] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] aeb72a57-d319-479d-a1c7-3cebc6f73f09/aeb72a57-d319-479d-a1c7-3cebc6f73f09.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 833.905837] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d2e6cf4-370a-4a79-9b8c-0e94e0346f62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.913319] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 833.913319] env[61905]: value = "task-1362401" [ 833.913319] env[61905]: _type = "Task" [ 833.913319] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.923086] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.955851] env[61905]: DEBUG oslo_vmware.api [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362399, 'name': PowerOnVM_Task, 'duration_secs': 0.480499} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.956148] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.956368] env[61905]: INFO nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Took 11.81 seconds to spawn the instance on the hypervisor. 
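Taken together, the task names trace the vmwareapi spawn path for a single instance: CreateVM_Task registers the VM with its VIF info, SearchDatastore_Task checks devstack-image-cache_base for the image VMDK, CopyVirtualDisk_Task copies it into the instance directory, ExtendVirtualDisk_Task grows it to the flavor's root_gb, ReconfigVM_Task attaches it, Rename_Task renames the VM, and PowerOnVM_Task boots it. A condensed stub of that ordering, reconstructed from this log rather than taken from Nova's code; run_task stands in for invoking a vim method and blocking on its task, as in the polling sketch earlier:

    def run_task(name):
        # Stand-in for one vCenter round trip: invoke a method, then
        # block on the returned task until it reports success.
        print("%s completed successfully" % name)

    def spawn_sequence():
        run_task("CreateVM_Task")           # register VM with VIF info
        run_task("SearchDatastore_Task")    # is the image VMDK cached?
        run_task("CopyVirtualDisk_Task")    # image cache -> instance dir
        run_task("ExtendVirtualDisk_Task")  # grow root disk to flavor size
        run_task("ReconfigVM_Task")         # attach the copied disk
        run_task("Rename_Task")             # rename the VM
        run_task("PowerOnVM_Task")          # power on; spawn is complete

    spawn_sequence()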
[ 833.956552] env[61905]: DEBUG nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 833.957358] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21f8f0b-add6-4e70-921d-172cb395c420 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.093311] env[61905]: DEBUG nova.scheduler.client.report [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 834.172049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 834.172352] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Instance network_info: |[{"id": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "address": "fa:16:3e:55:ab:ab", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d6e4a71-55", "ovs_interfaceid": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 834.173329] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:ab:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d6e4a71-550f-4400-af0a-c8e5178259bf', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 834.180261] env[61905]: DEBUG oslo.service.loopingcall [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 834.180856] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 834.181116] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78e3a300-6cc3-4e33-8a8c-a2c73c6fa5a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.204199] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 834.204199] env[61905]: value = "task-1362402"
[ 834.204199] env[61905]: _type = "Task"
[ 834.204199] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 834.219617] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 834.222796] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362402, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.252438] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 834.252438] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 834.252438] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 834.252664] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 834.253562] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 834.253562] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 834.253562] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 834.253711] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 834.253887] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 834.254170] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 834.254437] env[61905]: DEBUG nova.virt.hardware [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 834.255945] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdfd704-8099-4fa5-8b68-651b5f719dbb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.265782] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71e435a-20d3-4a38-bb60-11afb240bd6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.380586] env[61905]: DEBUG nova.compute.manager [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Received event network-changed-6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 834.380866] env[61905]: DEBUG nova.compute.manager [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Refreshing instance network info cache due to event network-changed-6d6e4a71-550f-4400-af0a-c8e5178259bf. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 834.380866] env[61905]: DEBUG oslo_concurrency.lockutils [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] Acquiring lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 834.381048] env[61905]: DEBUG oslo_concurrency.lockutils [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] Acquired lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 834.381137] env[61905]: DEBUG nova.network.neutron [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Refreshing network info cache for port 6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 834.404540] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362400, 'name': Rename_Task, 'duration_secs': 0.16072} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 834.404767] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 834.405045] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14873b02-6301-4146-bacc-f2a745909cdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.412314] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){
[ 834.412314] env[61905]: value = "task-1362403"
[ 834.412314] env[61905]: _type = "Task"
[ 834.412314] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 834.428142] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362403, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.431841] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362401, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.477684] env[61905]: INFO nova.compute.manager [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Took 40.62 seconds to build instance.
[ 834.599694] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 834.600599] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 834.606627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.227s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 834.609020] env[61905]: INFO nova.compute.claims [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 834.642723] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Successfully updated port: f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 834.700516] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "fed05097-de84-4617-bf9e-7fc116ebc56e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 834.700766] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 834.700966] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 834.701170] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 834.701342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 834.703334] env[61905]: INFO nova.compute.manager [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Terminating instance
[ 834.705151] env[61905]: DEBUG nova.compute.manager [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 834.705342] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 834.709304] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df100049-6141-4062-9658-bec5b7e3ac61 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.716174] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 834.719185] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1871ee74-2356-4d0e-9d42-47be8ce03f7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.720771] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362402, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.726041] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){
[ 834.726041] env[61905]: value = "task-1362404"
[ 834.726041] env[61905]: _type = "Task"
[ 834.726041] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 834.735169] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.928009] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362403, 'name': PowerOnVM_Task} progress is 76%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.931212] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643529} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 834.931524] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] aeb72a57-d319-479d-a1c7-3cebc6f73f09/aeb72a57-d319-479d-a1c7-3cebc6f73f09.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 834.931757] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 834.932050] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d51ae925-960d-44a2-8d10-65464e9d732b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 834.939175] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){
[ 834.939175] env[61905]: value = "task-1362405"
[ 834.939175] env[61905]: _type = "Task"
[ 834.939175] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 834.947358] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 834.979448] env[61905]: DEBUG oslo_concurrency.lockutils [None req-88f3aa9c-3bfa-41d0-ba5b-d8179c4fd73f tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.418s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 835.094074] env[61905]: DEBUG nova.network.neutron [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Updated VIF entry in instance network info cache for port 6d6e4a71-550f-4400-af0a-c8e5178259bf. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 835.094465] env[61905]: DEBUG nova.network.neutron [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Updating instance_info_cache with network_info: [{"id": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "address": "fa:16:3e:55:ab:ab", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d6e4a71-55", "ovs_interfaceid": "6d6e4a71-550f-4400-af0a-c8e5178259bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 835.107625] env[61905]: DEBUG nova.compute.utils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 835.109385] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 835.109548] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 835.145972] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 835.146052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 835.147355] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 835.153859] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "d31570f0-7662-4e13-9dee-51dc66728acc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 835.154069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 835.154266] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 835.154447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 835.154645] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 835.157184] env[61905]: INFO nova.compute.manager [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Terminating instance
[ 835.160047] env[61905]: DEBUG nova.policy [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eea5f65ce8b54d4e9eb726f58a4e39a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40c35a175eec4445817a2860c1f5770d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 835.162067] env[61905]: DEBUG nova.compute.manager [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 835.162271] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 835.163499] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bff4d9-cd09-4b06-9d83-b9ef39f8a9af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.171971] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 835.172240] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-930c66c2-c495-43fb-9839-3b7233bd43ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.177596] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){
[ 835.177596] env[61905]: value = "task-1362406"
[ 835.177596] env[61905]: _type = "Task"
[ 835.177596] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 835.185608] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.215497] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362402, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.236573] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.423469] env[61905]: DEBUG oslo_vmware.api [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362403, 'name': PowerOnVM_Task, 'duration_secs': 0.920067} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 835.423906] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 835.423991] env[61905]: INFO nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Took 8.44 seconds to spawn the instance on the hypervisor.
[ 835.424114] env[61905]: DEBUG nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 835.424930] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107f20a7-0849-40a8-920a-c30af4e0fede {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.448702] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067891} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 835.448927] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}}
[ 835.450138] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc6000e-3eb0-4f95-98f6-0b4b065151b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.473234] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] aeb72a57-d319-479d-a1c7-3cebc6f73f09/aeb72a57-d319-479d-a1c7-3cebc6f73f09.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}}
[ 835.474204] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Successfully created port: 8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 835.476731] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ef01c52-259a-4156-8fb8-92d211274cdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.493220] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 835.503190] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){
[ 835.503190] env[61905]: value = "task-1362407"
[ 835.503190] env[61905]: _type = "Task"
[ 835.503190] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 835.509510] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.597145] env[61905]: DEBUG oslo_concurrency.lockutils [req-d5022f70-22fd-4a27-a6d2-39bb19400055 req-6dd21c90-a1ae-439b-b44e-ce02ea913ff5 service nova] Releasing lock "refresh_cache-74f94a46-63e4-44e0-9142-7e7d46cd31a7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 835.613557] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
[ 835.684133] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 835.692875] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362406, 'name': PowerOffVM_Task, 'duration_secs': 0.392797} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 835.693229] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 835.693229] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 835.693480] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f15d35d2-2fdf-44fc-929e-483dfddec1d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.715823] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362402, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.739182] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362404, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.835149] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 835.835376] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 835.835555] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Deleting the datastore file [datastore1] d31570f0-7662-4e13-9dee-51dc66728acc {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 835.835829] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2de06332-68b4-4770-bc55-e95956b424db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.842782] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for the task: (returnval){
[ 835.842782] env[61905]: value = "task-1362409"
[ 835.842782] env[61905]: _type = "Task"
[ 835.842782] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 835.852269] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 835.889029] env[61905]: DEBUG nova.network.neutron [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Updating instance_info_cache with network_info: [{"id": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "address": "fa:16:3e:ca:48:d8", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0c313e8-84", "ovs_interfaceid": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 835.946041] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bdcbe9-055e-44df-af26-3c619c0d6db6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 835.951625] env[61905]: INFO nova.compute.manager [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Took 28.04 seconds to build instance.
[ 835.956660] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ba0ce0-8db1-4687-9b9b-914f3d68d716 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.991078] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f05068-9656-4424-8ffc-90f3e7f8af1b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.003664] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c53a801-e5fb-40d2-9b18-c07ba0ec26da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.012258] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.022865] env[61905]: DEBUG nova.compute.provider_tree [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.024855] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362407, 'name': ReconfigVM_Task, 'duration_secs': 0.490045} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.025147] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Reconfigured VM instance instance-0000003e to attach disk [datastore2] aeb72a57-d319-479d-a1c7-3cebc6f73f09/aeb72a57-d319-479d-a1c7-3cebc6f73f09.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.025759] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-509154cf-3ddc-4afc-8dd1-6f975ff5346d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.034418] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 836.034418] env[61905]: value = "task-1362410" [ 836.034418] env[61905]: _type = "Task" [ 836.034418] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.042548] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362410, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.215803] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362402, 'name': CreateVM_Task, 'duration_secs': 1.573171} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.215988] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.216784] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.216946] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.217319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.217848] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1949e6c-8a75-4998-bcd4-e727e6e65ecc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.222293] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 836.222293] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521f5252-549c-abeb-8afd-b19450c3a809" [ 836.222293] env[61905]: _type = "Task" [ 836.222293] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.233857] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521f5252-549c-abeb-8afd-b19450c3a809, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.238627] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362404, 'name': PowerOffVM_Task, 'duration_secs': 1.119806} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.238627] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.238797] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.239026] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-470f5451-d44d-470f-a4c1-d98ffaa4cb59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.292914] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.293466] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.293466] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Deleting the datastore file [datastore1] fed05097-de84-4617-bf9e-7fc116ebc56e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.293576] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-992afbca-befd-41e9-b393-7bb0090a6683 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.300008] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for the task: (returnval){ [ 836.300008] env[61905]: value = "task-1362412" [ 836.300008] env[61905]: _type = "Task" [ 836.300008] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.307574] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362412, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.352557] env[61905]: DEBUG oslo_vmware.api [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Task: {'id': task-1362409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19086} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.352783] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.352964] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.353157] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.353332] env[61905]: INFO nova.compute.manager [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Took 1.19 seconds to destroy the instance on the hypervisor. [ 836.353572] env[61905]: DEBUG oslo.service.loopingcall [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.353772] env[61905]: DEBUG nova.compute.manager [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 836.353868] env[61905]: DEBUG nova.network.neutron [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.391661] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.392017] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Instance network_info: |[{"id": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "address": "fa:16:3e:ca:48:d8", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0c313e8-84", "ovs_interfaceid": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 836.392465] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:48:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0746f464-a938-427b-ba02-600449df5070', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0c313e8-8429-4144-9f60-e93ba0ce30d1', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.400410] env[61905]: DEBUG oslo.service.loopingcall [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.400652] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.400889] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-876add18-9f62-40e0-993c-50b05f582da4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.439074] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.439074] env[61905]: value = "task-1362413" [ 836.439074] env[61905]: _type = "Task" [ 836.439074] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.445278] env[61905]: DEBUG nova.compute.manager [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Received event network-vif-plugged-f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.445491] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Acquiring lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.447569] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.447766] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.002s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.448078] env[61905]: DEBUG nova.compute.manager [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] No waiting events found dispatching network-vif-plugged-f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 836.448166] env[61905]: WARNING nova.compute.manager [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Received unexpected event network-vif-plugged-f0c313e8-8429-4144-9f60-e93ba0ce30d1 for instance with vm_state building and task_state spawning. 
[ 836.448338] env[61905]: DEBUG nova.compute.manager [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Received event network-changed-f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.448525] env[61905]: DEBUG nova.compute.manager [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Refreshing instance network info cache due to event network-changed-f0c313e8-8429-4144-9f60-e93ba0ce30d1. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 836.448712] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Acquiring lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.448847] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Acquired lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.448999] env[61905]: DEBUG nova.network.neutron [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Refreshing network info cache for port f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.456694] env[61905]: DEBUG oslo_concurrency.lockutils [None req-98eb5fad-3eec-44ca-b113-84a8099a8669 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.618s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.457197] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362413, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.528637] env[61905]: DEBUG nova.scheduler.client.report [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.545146] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362410, 'name': Rename_Task, 'duration_secs': 0.138145} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.545775] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 836.546164] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d2fd874-f8d6-4c25-9ff5-8394c737b58c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.552473] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 836.552473] env[61905]: value = "task-1362414" [ 836.552473] env[61905]: _type = "Task" [ 836.552473] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.560905] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.607501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.607501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.607501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.607501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.607501] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.609058] env[61905]: INFO nova.compute.manager [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Terminating instance [ 836.610950] env[61905]: DEBUG nova.compute.manager [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 836.611231] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.612190] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ca989d-a7eb-4c84-a6ec-002b7c495270 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.620671] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.620902] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32568151-8631-40f0-9737-669bcca3f5ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.623226] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 836.630861] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 836.630861] env[61905]: value = "task-1362415" [ 836.630861] env[61905]: _type = "Task" [ 836.630861] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.639534] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362415, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.657123] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.657386] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.657562] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.657754] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.657966] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.658292] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.658545] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.658729] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.658914] env[61905]: DEBUG nova.virt.hardware [None 
req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.659103] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.659301] env[61905]: DEBUG nova.virt.hardware [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.660446] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5440fb-e4e1-4576-91d2-b5eba5777897 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.670692] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c891a837-1e12-41dd-8639-d34ac184a2b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.699372] env[61905]: DEBUG nova.compute.manager [req-a21e24cc-d82f-4ef3-bc8b-05a20c6f8c0d req-a14e17f6-ee8d-4bc7-b996-76619e99f94a service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-vif-deleted-ccfc79bb-4361-47ac-b5bd-62ac65252eee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 836.699556] env[61905]: INFO nova.compute.manager [req-a21e24cc-d82f-4ef3-bc8b-05a20c6f8c0d req-a14e17f6-ee8d-4bc7-b996-76619e99f94a service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Neutron deleted interface ccfc79bb-4361-47ac-b5bd-62ac65252eee; detaching it from the instance and deleting it from the info cache [ 836.699834] env[61905]: DEBUG nova.network.neutron [req-a21e24cc-d82f-4ef3-bc8b-05a20c6f8c0d req-a14e17f6-ee8d-4bc7-b996-76619e99f94a service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updating instance_info_cache with network_info: [{"id": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "address": "fa:16:3e:26:46:53", "network": {"id": "5627c279-6cac-4085-a9be-d6c356687ee9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1315975732", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8e003b9359346418cead91f86082c4b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8abee039-d93e-48a7-8911-6416a3e1ff30", "external-id": "nsx-vlan-transportzone-654", "segmentation_id": 654, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap628da9f1-27", "ovs_interfaceid": "628da9f1-2769-4d1e-8dd4-afd93b50bb24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.733754] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521f5252-549c-abeb-8afd-b19450c3a809, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.733754] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.734020] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.734297] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.734457] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.734661] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 836.734950] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ff248c5-d4a8-454c-88cf-38fb9c4314a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.743865] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 836.744082] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] 
Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 836.744920] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18b2c976-f0fb-458c-a4e0-d1983e0cb320 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.751192] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 836.751192] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e589d-0c7b-8416-c942-de62b1873dac" [ 836.751192] env[61905]: _type = "Task" [ 836.751192] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.760595] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e589d-0c7b-8416-c942-de62b1873dac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.810997] env[61905]: DEBUG oslo_vmware.api [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Task: {'id': task-1362412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165477} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.811308] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.811508] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.811700] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.811898] env[61905]: INFO nova.compute.manager [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Took 2.11 seconds to destroy the instance on the hypervisor. [ 836.812212] env[61905]: DEBUG oslo.service.loopingcall [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.812443] env[61905]: DEBUG nova.compute.manager [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 836.812534] env[61905]: DEBUG nova.network.neutron [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.956557] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362413, 'name': CreateVM_Task, 'duration_secs': 0.446701} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.956736] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.957888] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.958085] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.958466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.958800] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a3a9e2-61bf-4a79-80eb-71cb3c8ea7c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.964071] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.969803] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 836.969803] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e924fb-565f-7d5e-add7-6ed946058c1c" [ 836.969803] env[61905]: _type = "Task" [ 836.969803] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.978644] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e924fb-565f-7d5e-add7-6ed946058c1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.032728] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.033257] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.036118] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.742s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.037504] env[61905]: INFO nova.compute.claims [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.063434] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Successfully updated port: 8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.070375] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362414, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.140379] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362415, 'name': PowerOffVM_Task, 'duration_secs': 0.229276} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.142699] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.142875] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.143135] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98011bb7-84c8-49fa-b29a-1bce81b647cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.198917] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.199134] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.199402] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Deleting the datastore file [datastore2] ba3a1e36-a9f8-4482-908e-9c949c6f42ec {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.199708] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09e57c5a-e9b7-48f7-8b0d-e820ed0abd59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.202811] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11defcb4-c66e-46e4-bf54-9faea9d89268 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.211834] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8111839d-d291-49c2-824f-aec6d7a3bd54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.223242] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for the task: (returnval){ [ 837.223242] env[61905]: value = "task-1362417" [ 837.223242] env[61905]: _type = "Task" [ 837.223242] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.244192] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.244615] env[61905]: DEBUG nova.compute.manager [req-a21e24cc-d82f-4ef3-bc8b-05a20c6f8c0d req-a14e17f6-ee8d-4bc7-b996-76619e99f94a service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Detach interface failed, port_id=ccfc79bb-4361-47ac-b5bd-62ac65252eee, reason: Instance d31570f0-7662-4e13-9dee-51dc66728acc could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 837.261783] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e589d-0c7b-8416-c942-de62b1873dac, 'name': SearchDatastore_Task, 'duration_secs': 0.010016} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.262167] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d23bf27-5587-4b65-9b0f-2e3e6cc3ad56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.269565] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 837.269565] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52306c3d-6e05-4469-5a46-25f75ce64792" [ 837.269565] env[61905]: _type = "Task" [ 837.269565] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.278468] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52306c3d-6e05-4469-5a46-25f75ce64792, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.306507] env[61905]: DEBUG nova.network.neutron [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Updated VIF entry in instance network info cache for port f0c313e8-8429-4144-9f60-e93ba0ce30d1. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.306853] env[61905]: DEBUG nova.network.neutron [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Updating instance_info_cache with network_info: [{"id": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "address": "fa:16:3e:ca:48:d8", "network": {"id": "3c776afe-850b-4844-801e-e34c49167258", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-336797527-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9c6510431f346018b853e88960e38a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0746f464-a938-427b-ba02-600449df5070", "external-id": "nsx-vlan-transportzone-881", "segmentation_id": 881, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0c313e8-84", "ovs_interfaceid": "f0c313e8-8429-4144-9f60-e93ba0ce30d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.377605] env[61905]: DEBUG nova.network.neutron [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.483094] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e924fb-565f-7d5e-add7-6ed946058c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.009282} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.483401] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.483623] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.483824] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.485020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.542009] env[61905]: DEBUG nova.compute.utils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.545272] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 837.545435] env[61905]: DEBUG nova.network.neutron [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.564067] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362414, 'name': PowerOnVM_Task, 'duration_secs': 0.526419} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.564564] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.564706] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.564848] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.565845] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.566082] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Took 8.16 seconds to spawn the instance on the hypervisor. 
[ 837.566289] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 837.567180] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f452c6f-6f9c-432e-97e1-2daf79c5495e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.583345] env[61905]: DEBUG nova.policy [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 837.734678] env[61905]: DEBUG oslo_vmware.api [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Task: {'id': task-1362417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167638} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.734936] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.735134] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.735310] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.735477] env[61905]: INFO nova.compute.manager [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 837.735712] env[61905]: DEBUG oslo.service.loopingcall [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.736265] env[61905]: DEBUG nova.compute.manager [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 837.736363] env[61905]: DEBUG nova.network.neutron [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.778474] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52306c3d-6e05-4469-5a46-25f75ce64792, 'name': SearchDatastore_Task, 'duration_secs': 0.01909} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.778474] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.778474] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 74f94a46-63e4-44e0-9142-7e7d46cd31a7/74f94a46-63e4-44e0-9142-7e7d46cd31a7.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.778776] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.778776] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.779052] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-423def8b-b629-494f-a07b-93490ffcfd1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.781170] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with
opID=oslo.vmware-396fcc47-4287-4a5d-b6ac-7f2c7dc2fe39 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.788356] env[61905]: DEBUG nova.network.neutron [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.789705] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 837.789705] env[61905]: value = "task-1362418" [ 837.789705] env[61905]: _type = "Task" [ 837.789705] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.791159] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.791334] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.794834] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58eb53b5-eb0b-47a3-9521-e5eac6459543 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.804022] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 837.804022] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52dc5644-2e45-c804-a671-e7dfc949d5c2" [ 837.804022] env[61905]: _type = "Task" [ 837.804022] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.804590] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.813161] env[61905]: DEBUG oslo_concurrency.lockutils [req-eaf26fcd-7946-4e57-93dd-d0b28589d728 req-906f5bcd-8d70-4b0b-a621-d976c9414dc4 service nova] Releasing lock "refresh_cache-111d10e8-7e36-48b6-be45-2275c36fbee4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.813782] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52dc5644-2e45-c804-a671-e7dfc949d5c2, 'name': SearchDatastore_Task, 'duration_secs': 0.008826} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.814521] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e05ee1-5dfb-430e-b7f7-a329937f2850 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.819220] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 837.819220] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a5c63f-5f87-6a00-002e-d35eec18c524" [ 837.819220] env[61905]: _type = "Task" [ 837.819220] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.826696] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a5c63f-5f87-6a00-002e-d35eec18c524, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.880432] env[61905]: INFO nova.compute.manager [-] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Took 1.53 seconds to deallocate network for instance. [ 837.970432] env[61905]: DEBUG nova.network.neutron [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Successfully created port: 1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.046489] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.090300] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Took 25.90 seconds to build instance. [ 838.132645] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.279468] env[61905]: DEBUG nova.network.neutron [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updating instance_info_cache with network_info: [{"id": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "address": "fa:16:3e:cb:9d:c2", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ee1c399-6b", "ovs_interfaceid": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.291009] env[61905]: INFO nova.compute.manager [-] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Took 1.48 seconds to deallocate network for instance. [ 838.316980] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362418, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521458} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.317284] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 74f94a46-63e4-44e0-9142-7e7d46cd31a7/74f94a46-63e4-44e0-9142-7e7d46cd31a7.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.317499] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.318223] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-879433ac-a39c-45fc-98ea-dc5089691898 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.328828] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 838.328828] env[61905]: value = "task-1362419" [ 838.328828] env[61905]: _type = "Task" [ 838.328828] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.333840] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a5c63f-5f87-6a00-002e-d35eec18c524, 'name': SearchDatastore_Task, 'duration_secs': 0.007946} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.337298] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.337580] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 111d10e8-7e36-48b6-be45-2275c36fbee4/111d10e8-7e36-48b6-be45-2275c36fbee4.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.339067] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d61e8ccd-39ed-4326-8b61-e2e268388344 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.345827] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362419, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.349792] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 838.349792] env[61905]: value = "task-1362420" [ 838.349792] env[61905]: _type = "Task" [ 838.349792] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.359869] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362420, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.389171] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.410258] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ccf554-5b1a-4c10-8758-4ff4e739034e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.417691] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4cf0a1-19f0-478b-9f81-5e6dc2f060bf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.447089] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e230b7e-18e7-439c-bb67-686d54765c91 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.454160] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71c8447-edad-4d22-8a6b-8f2e645aa814 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.467261] env[61905]: DEBUG nova.compute.provider_tree [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.470361] env[61905]: DEBUG nova.compute.manager [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Received event network-vif-plugged-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.470564] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Acquiring lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.470762] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.470928] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.471102]
env[61905]: DEBUG nova.compute.manager [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] No waiting events found dispatching network-vif-plugged-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.471434] env[61905]: WARNING nova.compute.manager [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Received unexpected event network-vif-plugged-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 for instance with vm_state building and task_state spawning. [ 838.471434] env[61905]: DEBUG nova.compute.manager [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Received event network-changed-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.471556] env[61905]: DEBUG nova.compute.manager [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Refreshing instance network info cache due to event network-changed-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.471761] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Acquiring lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.591745] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 147.020s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.720444] env[61905]: DEBUG nova.network.neutron [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.730478] env[61905]: DEBUG nova.compute.manager [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Received event network-vif-deleted-628da9f1-2769-4d1e-8dd4-afd93b50bb24 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.730697] env[61905]: DEBUG nova.compute.manager [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Received event network-vif-deleted-28f77124-405d-4c8e-8ca6-3e2fc6d05139 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.730810] env[61905]: DEBUG nova.compute.manager [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Received event network-vif-deleted-5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b {{(pid=61905) external_instance_event
/opt/stack/nova/nova/compute/manager.py:11131}} [ 838.730942] env[61905]: INFO nova.compute.manager [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Neutron deleted interface 5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b; detaching it from the instance and deleting it from the info cache [ 838.731120] env[61905]: DEBUG nova.network.neutron [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.782212] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.782567] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Instance network_info: |[{"id": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "address": "fa:16:3e:cb:9d:c2", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ee1c399-6b", "ovs_interfaceid": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 838.782914] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Acquired lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.783105] env[61905]: DEBUG nova.network.neutron [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Refreshing network info cache for port 8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.784376] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:9d:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '778b9a40-d603-4765-ac88-bd6d42c457a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.792157] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating folder: Project (40c35a175eec4445817a2860c1f5770d). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.793169] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d50fd3ce-47fb-48b3-8a26-e2a86a438b31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.800713] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.804169] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created folder: Project (40c35a175eec4445817a2860c1f5770d) in parent group-v289968. [ 838.804169] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating folder: Instances. Parent ref: group-v290011. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.804288] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-700a6857-1adc-4ffb-bc68-efaa6d97844f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.815220] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created folder: Instances in parent group-v290011. [ 838.815460] env[61905]: DEBUG oslo.service.loopingcall [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.815650] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.815888] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f47b7ff-2283-4650-a5d4-3131686afc66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.837562] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.837562] env[61905]: value = "task-1362423" [ 838.837562] env[61905]: _type = "Task" [ 838.837562] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.848397] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057892} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.851774] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 838.852135] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362423, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.852917] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fe86b4-a8e6-4d14-8636-a0ccf06165a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.864687] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362420, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.882407] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 74f94a46-63e4-44e0-9142-7e7d46cd31a7/74f94a46-63e4-44e0-9142-7e7d46cd31a7.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 838.882725] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3295b62d-d558-41af-bcda-f3c88bf92ec7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.903840] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 838.903840] env[61905]: value = "task-1362424" [ 838.903840] env[61905]: _type = "Task" [ 838.903840] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.916177] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362424, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.974205] env[61905]: DEBUG nova.scheduler.client.report [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 839.059121] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 839.084254] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 839.084511] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 839.084667] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.084843] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 839.084985] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.085144] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 839.085344] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 839.085499] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 839.085662] env[61905]: DEBUG nova.virt.hardware [None 
req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 839.085817] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 839.085982] env[61905]: DEBUG nova.virt.hardware [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 839.086892] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2da0d0-6703-4205-9292-a6b2891a003d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.093610] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 839.097172] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6e8dba-c584-480a-9000-5db75fe1807a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.223498] env[61905]: INFO nova.compute.manager [-] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Took 1.49 seconds to deallocate network for instance. [ 839.236910] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8e74e78-5221-4ed0-935d-1c816043abf6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.247020] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac5bcd5-27e4-4c76-9a62-3bcaab1f55de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.272369] env[61905]: DEBUG nova.compute.manager [req-60713cc5-07e4-4141-b65d-4cc3377be234 req-13b9456d-432c-4a9b-9d09-da7351f61f55 service nova] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Detach interface failed, port_id=5087d36a-1d4e-4ef2-9ee4-c4cd2938bd1b, reason: Instance ba3a1e36-a9f8-4482-908e-9c949c6f42ec could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 839.347124] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362423, 'name': CreateVM_Task, 'duration_secs': 0.49499} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.347330] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.348068] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.348221] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.348643] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.348914] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-355e1687-2b75-41da-ad81-8529ceead49a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.353591] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 839.353591] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f121a3-f1f2-1c7a-918e-4de962f94c14" [ 839.353591] env[61905]: _type = "Task" [ 839.353591] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.365203] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362420, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730204} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.368988] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 111d10e8-7e36-48b6-be45-2275c36fbee4/111d10e8-7e36-48b6-be45-2275c36fbee4.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.369343] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.369788] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f121a3-f1f2-1c7a-918e-4de962f94c14, 'name': SearchDatastore_Task, 'duration_secs': 0.00903} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.372336] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-684f8521-8a62-40e1-b164-0ca2e0b00217 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.374240] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.374461] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.374683] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.374825] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.375040] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 
tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.375305] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-248765fb-2d2e-49fd-b5d3-e3740541736e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.382206] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 839.382206] env[61905]: value = "task-1362425" [ 839.382206] env[61905]: _type = "Task" [ 839.382206] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.386423] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.386593] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.387590] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e20f6243-524a-4258-a5d0-e397bd1beaf6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.392812] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362425, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.397847] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 839.397847] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d9f654-02af-5fbd-00ee-0b0f605b8754" [ 839.397847] env[61905]: _type = "Task" [ 839.397847] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.405517] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d9f654-02af-5fbd-00ee-0b0f605b8754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.413243] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362424, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.478829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.479672] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 839.482394] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.862s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.525879] env[61905]: DEBUG nova.network.neutron [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updated VIF entry in instance network info cache for port 8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.526256] env[61905]: DEBUG nova.network.neutron [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updating instance_info_cache with network_info: [{"id": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "address": "fa:16:3e:cb:9d:c2", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ee1c399-6b", "ovs_interfaceid": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.622539] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.723604] env[61905]: DEBUG nova.network.neutron 
[None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Successfully updated port: 1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.730580] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.894490] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362425, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064946} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.894754] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.895562] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be4735a-d14d-4e99-846e-8c0d88cd52c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.919054] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 111d10e8-7e36-48b6-be45-2275c36fbee4/111d10e8-7e36-48b6-be45-2275c36fbee4.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.922877] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c24ed84-263c-43fe-95e9-ed72be22e455 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.946043] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d9f654-02af-5fbd-00ee-0b0f605b8754, 'name': SearchDatastore_Task, 'duration_secs': 0.008561} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.948543] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20451deb-f319-439f-b865-1bc09c992fc0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.956394] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362424, 'name': ReconfigVM_Task, 'duration_secs': 0.802805} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.957042] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 74f94a46-63e4-44e0-9142-7e7d46cd31a7/74f94a46-63e4-44e0-9142-7e7d46cd31a7.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.958061] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99c43949-cd90-41a4-a4be-fae308bb2e5f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.960846] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 839.960846] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520930b9-e1f4-8624-bd73-9b0b911c82b2" [ 839.960846] env[61905]: _type = "Task" [ 839.960846] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.962468] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 839.962468] env[61905]: value = "task-1362426" [ 839.962468] env[61905]: _type = "Task" [ 839.962468] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.969592] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 839.969592] env[61905]: value = "task-1362427" [ 839.969592] env[61905]: _type = "Task" [ 839.969592] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.978504] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520930b9-e1f4-8624-bd73-9b0b911c82b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.978504] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362426, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.983370] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362427, 'name': Rename_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.985891] env[61905]: DEBUG nova.compute.utils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 839.993468] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 839.993468] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.029151] env[61905]: DEBUG oslo_concurrency.lockutils [req-51d8c5b4-5092-4da9-b504-d943e6eb5fdc req-71d1de53-b195-4e28-b465-a644d2046f0d service nova] Releasing lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.049787] env[61905]: DEBUG nova.policy [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ca67104cdbd4ac9be9a57bb19846925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7af9072624d04f669e8183581e6ca50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 840.227084] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.227289] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.227451] env[61905]: DEBUG nova.network.neutron [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.348584] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Successfully created port: ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.479219] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520930b9-e1f4-8624-bd73-9b0b911c82b2, 'name': SearchDatastore_Task, 'duration_secs': 0.01383} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.479461] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362426, 'name': ReconfigVM_Task, 'duration_secs': 0.271361} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.480071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.480329] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/12c21d8e-1941-4481-9216-015ba6c09b9b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.480610] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 111d10e8-7e36-48b6-be45-2275c36fbee4/111d10e8-7e36-48b6-be45-2275c36fbee4.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.481651] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48d32d8c-491b-4fee-a11c-5b1e7ee6d2c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.483121] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.Rename_Task with opID=oslo.vmware-621275a6-a03c-4f28-8dfc-64735c9fc2d6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.487192] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362427, 'name': Rename_Task, 'duration_secs': 0.20423} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.487974] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.488196] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df931162-7cd2-4830-865a-bf625a554962 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.492743] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 840.492743] env[61905]: value = "task-1362428" [ 840.492743] env[61905]: _type = "Task" [ 840.492743] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.494190] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 840.494190] env[61905]: value = "task-1362429" [ 840.494190] env[61905]: _type = "Task" [ 840.494190] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.497766] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 840.497766] env[61905]: value = "task-1362430" [ 840.497766] env[61905]: _type = "Task" [ 840.497766] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.500697] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 840.515096] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362428, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.519431] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362429, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.523655] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362430, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.531760] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 60e68738-a333-44b2-a1e8-0b3da728059e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.531978] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a4a03b8a-3206-4684-9d85-0e60ac643175 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.532141] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance d31570f0-7662-4e13-9dee-51dc66728acc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.532564] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance fed05097-de84-4617-bf9e-7fc116ebc56e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.532564] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ba3a1e36-a9f8-4482-908e-9c949c6f42ec is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 840.532564] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance aeb72a57-d319-479d-a1c7-3cebc6f73f09 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.532780] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 74f94a46-63e4-44e0-9142-7e7d46cd31a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.532780] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 111d10e8-7e36-48b6-be45-2275c36fbee4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.532880] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 12c21d8e-1941-4481-9216-015ba6c09b9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.532963] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a9ac365e-2be1-438d-a514-6fa7b26fa10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.533078] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e1a22f3e-4557-44d2-8e34-cc75f573fe41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 840.756512] env[61905]: DEBUG nova.compute.manager [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Received event network-vif-plugged-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.756816] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.757140] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.757377] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.757439] env[61905]: DEBUG nova.compute.manager [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] No waiting events found dispatching
network-vif-plugged-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 840.757624] env[61905]: WARNING nova.compute.manager [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Received unexpected event network-vif-plugged-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb for instance with vm_state building and task_state spawning. [ 840.757803] env[61905]: DEBUG nova.compute.manager [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Received event network-changed-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.757990] env[61905]: DEBUG nova.compute.manager [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Refreshing instance network info cache due to event network-changed-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 840.758178] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Acquiring lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.764239] env[61905]: DEBUG nova.network.neutron [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.933485] env[61905]: DEBUG nova.network.neutron [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updating instance_info_cache with network_info: [{"id": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "address": "fa:16:3e:89:96:2c", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1676e09a-a6", "ovs_interfaceid": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.010853] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482361} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.016611] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/12c21d8e-1941-4481-9216-015ba6c09b9b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.016806] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.017070] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362429, 'name': Rename_Task, 'duration_secs': 0.138636} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.017515] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0270d9a2-c6ab-4b02-9d41-cb9b3b6b9f09 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.019482] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.020332] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f0e9897-f1ee-43ce-b429-2f9163df82a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.024905] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362430, 'name': PowerOnVM_Task} progress is 74%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.028877] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 841.028877] env[61905]: value = "task-1362432" [ 841.028877] env[61905]: _type = "Task" [ 841.028877] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.032571] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 841.032571] env[61905]: value = "task-1362431" [ 841.032571] env[61905]: _type = "Task" [ 841.032571] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.038910] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a6e45dd1-e0ee-4bda-9513-4b1000e15e49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 841.040204] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.045173] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362431, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.439066] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.439372] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Instance network_info: |[{"id": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "address": "fa:16:3e:89:96:2c", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1676e09a-a6", "ovs_interfaceid": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 841.439696] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Acquired lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.439850] env[61905]: DEBUG nova.network.neutron [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Refreshing network info cache for port 1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 841.441294] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:96:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.448871] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Creating folder: Project (30edd7bc94ee492cb7f4e4f388e45b8b). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.450196] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c356b97-5d94-4370-9687-96a5153836cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.461352] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created folder: Project (30edd7bc94ee492cb7f4e4f388e45b8b) in parent group-v289968. [ 841.461669] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating folder: Instances. Parent ref: group-v290014. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 841.461967] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-305ffc2f-a7d6-4dee-bc7a-27d51ce4c830 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.474838] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created folder: Instances in parent group-v290014. [ 841.475047] env[61905]: DEBUG oslo.service.loopingcall [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.475247] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.475482] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c34d174e-5b7f-4602-be45-3e705773b6f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.495256] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.495256] env[61905]: value = "task-1362435" [ 841.495256] env[61905]: _type = "Task" [ 841.495256] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.518053] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 841.519167] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362435, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.526262] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362430, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.541799] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9a385d72-ba5d-48e0-b71f-d37d4e63c403 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 841.542972] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362432, 'name': PowerOnVM_Task, 'duration_secs': 0.459165} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.543783] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.544185] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Took 7.32 seconds to spawn the instance on the hypervisor. 
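
The PowerOnVM_Task, Rename_Task and CreateVM_Task records above all follow one pattern: the driver invokes an asynchronous vSphere *_Task method through oslo.vmware, and wait_for_task() then polls it, emitting a "progress is N%" DEBUG record per poll and a "completed successfully" record with duration_secs at the end. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an already-resolved VM managed-object reference (none of these values come from this log):

    from oslo_vmware import api

    def make_session():
        # Placeholder endpoint and credentials (assumptions for illustration,
        # not values from this run); constructing the session logs in.
        return api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                    api_retry_count=10, task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # Start the asynchronous vSphere task...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...then block while oslo.vmware polls it; each poll maps to a
        # "progress is N%" DEBUG record, and completion to the
        # "completed successfully" record with its duration_secs.
        return session.wait_for_task(task)
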
[ 841.544254] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 841.545285] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd12bed-e2bf-443b-a141-68defc5876fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.554733] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 841.554981] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 841.555152] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.555329] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 841.555473] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.555613] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 841.555890] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 841.556084] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 841.556242] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 841.556440] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 841.556627] env[61905]: DEBUG nova.virt.hardware [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 841.556946] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070255} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.557652] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3a1bd2-7d25-4997-a89d-b781ac3bc036 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.561412] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.562201] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89815a0b-1fff-4e1b-9064-7c9ce9dcb0ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.573573] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de6966f-4a94-4c95-aad2-0687f4b08ce6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.597637] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/12c21d8e-1941-4481-9216-015ba6c09b9b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.598789] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-325e42e1-e87f-4388-835c-372877a3cdf0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.626655] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 841.626655] env[61905]: value = "task-1362436" [ 841.626655] env[61905]: _type = "Task" [ 841.626655] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.639663] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362436, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.906326] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Successfully updated port: ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.005416] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362435, 'name': CreateVM_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.023023] env[61905]: DEBUG oslo_vmware.api [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362430, 'name': PowerOnVM_Task, 'duration_secs': 1.078067} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.023023] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.023023] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Took 10.22 seconds to spawn the instance on the hypervisor. 
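
The "Acquiring lock" / "acquired ... waited" / ""released" ... held" triples throughout this log come from oslo.concurrency's synchronized decorator, which times how long a caller waited for the named lock and how long it held it; the resource tracker serializes claims and usage updates under the "compute_resources" name seen above. A minimal sketch of the pattern (illustrative only; instance_claim() and update_usage() here are placeholders, not Nova's code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, memory_mb, vcpus):
        # Runs under the process-local "compute_resources" lock; the log's
        # "waited N s" is time blocked before entry, and "held N s" is how
        # long this body takes before the lock is released.
        update_usage(instance_uuid, memory_mb, vcpus)

    def update_usage(instance_uuid, memory_mb, vcpus):
        # Placeholder for the tracked-usage bookkeeping done under the lock.
        pass
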
[ 842.023023] env[61905]: DEBUG nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 842.023023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a132fe47-9ec1-4f70-a6af-4e4a2a7aaefa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.047741] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 4bb7a2df-b472-4f6d-8a01-a55d0b86efda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 842.101860] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Took 25.39 seconds to build instance. [ 842.135794] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362436, 'name': ReconfigVM_Task, 'duration_secs': 0.264869} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.136081] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/12c21d8e-1941-4481-9216-015ba6c09b9b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.136705] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-254968f6-f094-4b08-9b2d-2e77acf4c02d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.143260] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 842.143260] env[61905]: value = "task-1362437" [ 842.143260] env[61905]: _type = "Task" [ 842.143260] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.153162] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362437, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.206557] env[61905]: DEBUG nova.network.neutron [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updated VIF entry in instance network info cache for port 1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.206925] env[61905]: DEBUG nova.network.neutron [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updating instance_info_cache with network_info: [{"id": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "address": "fa:16:3e:89:96:2c", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1676e09a-a6", "ovs_interfaceid": "1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.411073] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.411073] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.411073] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.505879] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362435, 'name': CreateVM_Task, 'duration_secs': 0.674561} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.506029] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.506671] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.506833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.507168] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.507416] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12674bb7-ed8e-4f9b-b77c-e96f1e75a9d6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.511947] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 842.511947] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200717e-5a02-d131-1b41-062ec5ddf9fd" [ 842.511947] env[61905]: _type = "Task" [ 842.511947] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.519529] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200717e-5a02-d131-1b41-062ec5ddf9fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.544150] env[61905]: INFO nova.compute.manager [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Took 28.53 seconds to build instance. [ 842.551042] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 842.604299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.963s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.653946] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362437, 'name': Rename_Task, 'duration_secs': 0.135336} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.654249] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.654495] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a78543e3-9aeb-48b2-a262-f512a9d908eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.660671] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 842.660671] env[61905]: value = "task-1362438" [ 842.660671] env[61905]: _type = "Task" [ 842.660671] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.674984] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362438, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.709945] env[61905]: DEBUG oslo_concurrency.lockutils [req-8a3145c2-5594-46ce-a452-5663981f058b req-31acf464-580e-4142-8836-e68083d02979 service nova] Releasing lock "refresh_cache-a9ac365e-2be1-438d-a514-6fa7b26fa10c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.783078] env[61905]: DEBUG nova.compute.manager [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Received event network-vif-plugged-ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.783401] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Acquiring lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.783712] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.783971] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.784244] env[61905]: DEBUG nova.compute.manager [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] No waiting events found dispatching network-vif-plugged-ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.784514] env[61905]: WARNING nova.compute.manager [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Received unexpected event network-vif-plugged-ba90cb84-a34a-4577-a80d-54b1b531778d for instance with vm_state building and task_state spawning. [ 842.784778] env[61905]: DEBUG nova.compute.manager [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Received event network-changed-ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.785042] env[61905]: DEBUG nova.compute.manager [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Refreshing instance network info cache due to event network-changed-ba90cb84-a34a-4577-a80d-54b1b531778d. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 842.785302] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Acquiring lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.975413] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.022014] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200717e-5a02-d131-1b41-062ec5ddf9fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009549} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.022276] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.022502] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.022732] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.022873] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.023059] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.023305] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-139f69b5-c18b-4f2f-a9a2-f8242d8b70ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.030738] env[61905]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.030912] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.031567] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bc8a675-6136-4128-ad8b-d26e31970926 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.035986] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 843.035986] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe7103-1148-4a3b-4c22-22aebbefdd84" [ 843.035986] env[61905]: _type = "Task" [ 843.035986] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.043298] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe7103-1148-4a3b-4c22-22aebbefdd84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.045730] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c1e7023a-7486-46b3-af75-cdccf1ac1578 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.440s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.053321] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e3b11ed6-b703-43a6-a528-28520ed43233 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.106652] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 843.171046] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362438, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.482330] env[61905]: DEBUG nova.network.neutron [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Updating instance_info_cache with network_info: [{"id": "ba90cb84-a34a-4577-a80d-54b1b531778d", "address": "fa:16:3e:fb:3f:31", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba90cb84-a3", "ovs_interfaceid": "ba90cb84-a34a-4577-a80d-54b1b531778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.547230] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe7103-1148-4a3b-4c22-22aebbefdd84, 'name': SearchDatastore_Task, 'duration_secs': 0.007719} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.547989] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-844300f5-3091-4f95-aaf6-cebc461979ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.554139] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 843.554139] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b8fe16-cc57-3dab-3a9b-0129c07501e9" [ 843.554139] env[61905]: _type = "Task" [ 843.554139] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.560263] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b8fe16-cc57-3dab-3a9b-0129c07501e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.671874] env[61905]: DEBUG oslo_vmware.api [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362438, 'name': PowerOnVM_Task, 'duration_secs': 0.827586} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.672038] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 843.672214] env[61905]: INFO nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Took 7.05 seconds to spawn the instance on the hypervisor. [ 843.673019] env[61905]: DEBUG nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 843.673197] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f21c49-4673-433e-a34a-84d0433a80b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.814334] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 38b80675-182a-422c-9222-aa78ed59c351 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.817720] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 843.840789] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.985499] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.985844] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance network_info: |[{"id": "ba90cb84-a34a-4577-a80d-54b1b531778d", "address": "fa:16:3e:fb:3f:31", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba90cb84-a3", "ovs_interfaceid": "ba90cb84-a34a-4577-a80d-54b1b531778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 843.986019] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Acquired lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.986207] env[61905]: DEBUG nova.network.neutron [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Refreshing network info cache for port ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 843.987432] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:3f:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'ba90cb84-a34a-4577-a80d-54b1b531778d', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.994887] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating folder: Project (7af9072624d04f669e8183581e6ca50a). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 843.997921] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c640b0a-14bd-4c56-8a35-081ed504979c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.010252] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created folder: Project (7af9072624d04f669e8183581e6ca50a) in parent group-v289968. [ 844.010465] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating folder: Instances. Parent ref: group-v290017. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 844.010734] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1ab3d2d-93f4-4a1a-8bf5-057b6cf37a55 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.020103] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created folder: Instances in parent group-v290017. [ 844.020349] env[61905]: DEBUG oslo.service.loopingcall [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.020540] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.020777] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ec5d1a1-22b1-43f2-9a58-1a54448a42f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.043319] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.043319] env[61905]: value = "task-1362441" [ 844.043319] env[61905]: _type = "Task" [ 844.043319] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.054461] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362441, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.063195] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b8fe16-cc57-3dab-3a9b-0129c07501e9, 'name': SearchDatastore_Task, 'duration_secs': 0.04075} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.063404] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.063659] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] a9ac365e-2be1-438d-a514-6fa7b26fa10c/a9ac365e-2be1-438d-a514-6fa7b26fa10c.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.063915] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d213c7f0-7f86-4461-8542-136842ffd18d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.070963] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 844.070963] env[61905]: value = "task-1362442" [ 844.070963] env[61905]: _type = "Task" [ 844.070963] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.081321] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.191566] env[61905]: INFO nova.compute.manager [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Took 25.74 seconds to build instance. [ 844.222237] env[61905]: DEBUG nova.network.neutron [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Updated VIF entry in instance network info cache for port ba90cb84-a34a-4577-a80d-54b1b531778d. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.222377] env[61905]: DEBUG nova.network.neutron [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Updating instance_info_cache with network_info: [{"id": "ba90cb84-a34a-4577-a80d-54b1b531778d", "address": "fa:16:3e:fb:3f:31", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba90cb84-a3", "ovs_interfaceid": "ba90cb84-a34a-4577-a80d-54b1b531778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.324707] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 2c919b69-0e09-431d-8a75-98d5740c7dab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 844.343806] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.382034] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.382034] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.382365] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.382602] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.382951] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.385963] env[61905]: INFO nova.compute.manager [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Terminating instance [ 844.388368] env[61905]: DEBUG nova.compute.manager [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 844.388600] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.389873] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fdb04a5-3ac0-4552-b98b-f77ec306e64b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.398267] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 844.398588] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da30e91e-100d-47c6-9be9-90a69892e972 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.405074] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 844.405074] env[61905]: value = "task-1362443" [ 844.405074] env[61905]: _type = "Task" [ 844.405074] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.415550] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.557788] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362441, 'name': CreateVM_Task, 'duration_secs': 0.310258} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.557788] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.559071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.559071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.559418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.559751] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-778e6418-fce9-48d6-b47b-04c0e3095db7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.565660] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 844.565660] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52945a8f-e04b-bf89-fea5-eb17a9b54614" [ 844.565660] env[61905]: _type = "Task" [ 844.565660] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.578852] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52945a8f-e04b-bf89-fea5-eb17a9b54614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.585128] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362442, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.694551] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9504f925-ac28-4373-af5c-fff7615d07b1 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 140.306s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.724749] env[61905]: DEBUG oslo_concurrency.lockutils [req-392233ff-865d-4f41-b961-b5585bafe1a4 req-40d8f077-0189-43d3-a4af-522345a5f9ca service nova] Releasing lock "refresh_cache-e1a22f3e-4557-44d2-8e34-cc75f573fe41" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.829728] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1502df44-9166-4ce8-9117-a57e7be2d299 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 844.915904] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362443, 'name': PowerOffVM_Task, 'duration_secs': 0.220364} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.916117] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.916284] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.916538] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c75f1453-5c9e-494f-87f5-e4843cd72cb2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.976083] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.976331] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.976508] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleting the datastore file [datastore2] aeb72a57-d319-479d-a1c7-3cebc6f73f09 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.976778] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd45f4b7-150d-4e36-91c2-268b6fa350a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.982868] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 844.982868] env[61905]: value = "task-1362445" [ 844.982868] env[61905]: _type = "Task" [ 844.982868] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.991139] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.076569] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52945a8f-e04b-bf89-fea5-eb17a9b54614, 'name': SearchDatastore_Task, 'duration_secs': 0.04102} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.079671] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.079947] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.080207] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.080354] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.080527] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.080782] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9964ad5-94fd-46b2-81b8-623fc9cad5dd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.087709] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60162} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.087939] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] a9ac365e-2be1-438d-a514-6fa7b26fa10c/a9ac365e-2be1-438d-a514-6fa7b26fa10c.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.088152] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.088428] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09fcea26-f5be-49b5-9281-92658ab6e343 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.095202] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 845.095202] env[61905]: value = "task-1362446" [ 845.095202] env[61905]: _type = "Task" [ 845.095202] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.096279] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.096582] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.100891] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c5f798-4eee-488b-b03d-46bd7c037fc3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.109061] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362446, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.109344] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 845.109344] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527928de-ce56-8938-af7b-b6a71f3a2041" [ 845.109344] env[61905]: _type = "Task" [ 845.109344] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.116559] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527928de-ce56-8938-af7b-b6a71f3a2041, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.196291] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 845.212545] env[61905]: INFO nova.compute.manager [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Rescuing [ 845.212545] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.212545] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.212545] env[61905]: DEBUG nova.network.neutron [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.333099] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 299479fb-9a94-40b8-928d-8e491dbe1af1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 845.493957] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.605531] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064891} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.607026] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.607026] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca972af-617c-4c92-9d99-cfb2485f8d91 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.629583] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] a9ac365e-2be1-438d-a514-6fa7b26fa10c/a9ac365e-2be1-438d-a514-6fa7b26fa10c.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.630230] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26a185c3-ff55-480a-bbfe-d56c550f67ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.648417] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527928de-ce56-8938-af7b-b6a71f3a2041, 'name': SearchDatastore_Task, 'duration_secs': 0.01434} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.649692] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cbc1d43-bcdc-4ae8-91b8-4523c65f3320 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.653407] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 845.653407] env[61905]: value = "task-1362447" [ 845.653407] env[61905]: _type = "Task" [ 845.653407] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.654711] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 845.654711] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c37196-2299-8482-94c8-7d577979d50f" [ 845.654711] env[61905]: _type = "Task" [ 845.654711] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.665478] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362447, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.668386] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c37196-2299-8482-94c8-7d577979d50f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.717517] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.836342] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0f7ccb34-cb14-4b21-ae61-b066427d400e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 845.836342] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 845.836342] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 845.943637] env[61905]: DEBUG nova.network.neutron [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updating instance_info_cache with network_info: [{"id": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "address": "fa:16:3e:cb:9d:c2", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ee1c399-6b", "ovs_interfaceid": "8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.996480] env[61905]: DEBUG oslo_vmware.api [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.754099} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.998973] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.999188] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.999368] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.999539] env[61905]: INFO nova.compute.manager [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Took 1.61 seconds to destroy the instance on the hypervisor. [ 845.999778] env[61905]: DEBUG oslo.service.loopingcall [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.000177] env[61905]: DEBUG nova.compute.manager [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 846.000277] env[61905]: DEBUG nova.network.neutron [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 846.160373] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3783c3e8-0442-40bd-b5e1-9639dbec724d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.171618] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362447, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.176809] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c37196-2299-8482-94c8-7d577979d50f, 'name': SearchDatastore_Task, 'duration_secs': 0.012434} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.177790] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b364d32f-956e-41b1-a8d7-5a622cb31fbb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.180937] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.181234] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e1a22f3e-4557-44d2-8e34-cc75f573fe41/e1a22f3e-4557-44d2-8e34-cc75f573fe41.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.181489] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76240e8b-3f91-4a38-86a4-d78c49317a21 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.215943] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8632cf-d7ab-4101-98dd-921369531ca5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.218869] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 846.218869] env[61905]: value = "task-1362448" [ 846.218869] env[61905]: _type = "Task" [ 846.218869] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.225743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d67127a-1565-47f6-8575-715d44775917 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.232156] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.241686] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.346454] env[61905]: DEBUG nova.compute.manager [req-ce0e18fd-10e6-4d35-8fd4-161794ce3bc6 req-07f5a5fb-1640-4092-86bf-0ec46fc23b2b service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Received event network-vif-deleted-cebb7714-2d86-4f26-804f-fb6614734c71 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.346684] env[61905]: INFO nova.compute.manager [req-ce0e18fd-10e6-4d35-8fd4-161794ce3bc6 req-07f5a5fb-1640-4092-86bf-0ec46fc23b2b service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Neutron deleted interface cebb7714-2d86-4f26-804f-fb6614734c71; detaching it from the instance and deleting it from the info cache [ 846.346860] env[61905]: DEBUG nova.network.neutron [req-ce0e18fd-10e6-4d35-8fd4-161794ce3bc6 req-07f5a5fb-1640-4092-86bf-0ec46fc23b2b service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.450728] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "refresh_cache-12c21d8e-1941-4481-9216-015ba6c09b9b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.664063] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362447, 'name': ReconfigVM_Task, 'duration_secs': 0.563694} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.664342] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfigured VM instance instance-00000042 to attach disk [datastore1] a9ac365e-2be1-438d-a514-6fa7b26fa10c/a9ac365e-2be1-438d-a514-6fa7b26fa10c.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.664952] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d5f0803-74df-4102-916a-00d217ad9886 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.670856] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 846.670856] env[61905]: value = "task-1362449" [ 846.670856] env[61905]: _type = "Task" [ 846.670856] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.678075] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362449, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.728314] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362448, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.744870] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.825069] env[61905]: DEBUG nova.network.neutron [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.849077] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c4318f2-f082-4f74-8707-e94dfb077c25 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.858285] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cdfa73-b997-4f9f-913d-3157e698698b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.885056] env[61905]: DEBUG nova.compute.manager [req-ce0e18fd-10e6-4d35-8fd4-161794ce3bc6 req-07f5a5fb-1640-4092-86bf-0ec46fc23b2b service nova] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Detach interface failed, port_id=cebb7714-2d86-4f26-804f-fb6614734c71, reason: Instance aeb72a57-d319-479d-a1c7-3cebc6f73f09 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 846.981696] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.981993] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93fdde48-30f5-4c90-8fc7-3e652cff7a29 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.990017] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 846.990017] env[61905]: value = "task-1362450" [ 846.990017] env[61905]: _type = "Task" [ 846.990017] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.999217] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.181474] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362449, 'name': Rename_Task, 'duration_secs': 0.167444} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.181980] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.182362] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6efd693e-61d6-4533-bc77-8d3496c3e3df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.188666] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 847.188666] env[61905]: value = "task-1362451" [ 847.188666] env[61905]: _type = "Task" [ 847.188666] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.197430] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362451, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.228990] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.958299} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.230277] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e1a22f3e-4557-44d2-8e34-cc75f573fe41/e1a22f3e-4557-44d2-8e34-cc75f573fe41.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.230277] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.230277] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e32b1fc-b711-4767-aa04-a725acb00d26 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.237437] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 847.237437] env[61905]: value = "task-1362452" [ 847.237437] env[61905]: _type = "Task" [ 847.237437] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.246928] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362452, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.249792] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 847.250030] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.768s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.250311] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.002s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.251809] env[61905]: INFO nova.compute.claims [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.327413] env[61905]: INFO nova.compute.manager [-] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Took 1.33 seconds to deallocate network for instance. [ 847.499632] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362450, 'name': PowerOffVM_Task, 'duration_secs': 0.247269} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.499904] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.500695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba94948-3ba5-41e9-b55d-47e98a650b71 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.520485] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e31806d-47ee-4c74-8c49-f276cf8809db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.546786] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 847.547076] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1abc540-ca8d-48e7-ae9c-28afd987a6e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.553661] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 847.553661] env[61905]: value = "task-1362453" [ 847.553661] env[61905]: _type = "Task" [ 847.553661] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.561275] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.698541] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362451, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.745741] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362452, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.328627} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.745999] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.746826] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f19608a-e916-430f-8fde-0fdc79a9cc0c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.774704] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] e1a22f3e-4557-44d2-8e34-cc75f573fe41/e1a22f3e-4557-44d2-8e34-cc75f573fe41.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.778346] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b143415-8853-4d88-a95d-f121545c2e4e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.799695] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 847.799695] env[61905]: value = "task-1362454" [ 847.799695] env[61905]: _type = "Task" [ 847.799695] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.808085] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362454, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.835454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.055856] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7dbb77-0dd8-488e-b1a8-4af3996144cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.069595] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 848.069818] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.070065] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.070215] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.070385] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.070652] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07bf81bb-4b7b-4838-8391-15c95c86dd7b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.072906] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8de8ea-fe55-4fdd-be31-1694449106ad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.104546] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa2da85-9885-4cc7-b949-decf6b62b425 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.106953] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.107140] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.107805] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-081baf13-a672-4e0e-afad-6ba9c6f7423d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.115451] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97620f9-01df-4119-a1fa-d095bd45ab1e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.118966] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 848.118966] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5269f9f1-80c9-6a5c-915e-bd79a8a5bba5" [ 848.118966] env[61905]: _type = "Task" [ 848.118966] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.129567] env[61905]: DEBUG nova.compute.provider_tree [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.135852] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5269f9f1-80c9-6a5c-915e-bd79a8a5bba5, 'name': SearchDatastore_Task, 'duration_secs': 0.009752} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.136056] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dba2a4ce-0ff9-4216-9efb-0d3bf1500988 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.141084] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 848.141084] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ecb02d-6444-d549-9216-d7a586e45b2b" [ 848.141084] env[61905]: _type = "Task" [ 848.141084] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.148274] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ecb02d-6444-d549-9216-d7a586e45b2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.199664] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362451, 'name': PowerOnVM_Task} progress is 71%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.308674] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.632911] env[61905]: DEBUG nova.scheduler.client.report [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.651560] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ecb02d-6444-d549-9216-d7a586e45b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.009029} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.652498] env[61905]: DEBUG oslo_concurrency.lockutils [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.652763] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk. 
{{(pid=61905) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 848.653046] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5ee80e1-8f0a-4d5c-ad7c-c4478f21ee70 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.659988] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 848.659988] env[61905]: value = "task-1362455" [ 848.659988] env[61905]: _type = "Task" [ 848.659988] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.668025] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.701068] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.811543] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.137949] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.887s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.138561] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 849.141247] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.108s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.141445] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.143508] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.008s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.144988] env[61905]: INFO nova.compute.claims [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.171258] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49273} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.173460] env[61905]: INFO nova.scheduler.client.report [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleted allocations for instance a4a03b8a-3206-4684-9d85-0e60ac643175 [ 849.176230] env[61905]: INFO nova.virt.vmwareapi.ds_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk. 
[ 849.177821] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcc62d3-e3ba-4b6c-a774-8f77ea5a21c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.204456] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.208938] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87b0a98a-2193-4d2c-93f1-6bb64b5080cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.228996] env[61905]: DEBUG oslo_vmware.api [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362451, 'name': PowerOnVM_Task, 'duration_secs': 1.545997} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.230607] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 849.230607] env[61905]: INFO nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Took 10.17 seconds to spawn the instance on the hypervisor. [ 849.230752] env[61905]: DEBUG nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 849.231077] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 849.231077] env[61905]: value = "task-1362456" [ 849.231077] env[61905]: _type = "Task" [ 849.231077] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.231776] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da341c29-cf8e-458d-957a-bc0dcd00fa7f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.246411] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362456, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.311588] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362454, 'name': ReconfigVM_Task, 'duration_secs': 1.197139} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.311744] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Reconfigured VM instance instance-00000043 to attach disk [datastore1] e1a22f3e-4557-44d2-8e34-cc75f573fe41/e1a22f3e-4557-44d2-8e34-cc75f573fe41.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.312524] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24f684f6-fed9-4b23-9f23-681dcece1da2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.319763] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 849.319763] env[61905]: value = "task-1362457" [ 849.319763] env[61905]: _type = "Task" [ 849.319763] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.328378] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362457, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.649879] env[61905]: DEBUG nova.compute.utils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.654621] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 849.654621] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.683332] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0a0416e3-1f77-4a26-9a87-1fe3bca4112b tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "a4a03b8a-3206-4684-9d85-0e60ac643175" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 25.639s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.690887] env[61905]: DEBUG nova.policy [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '947946764fc64847946057d867de54bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '980cc259c0254e84989e0cfc0e45837f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 849.744158] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362456, 'name': ReconfigVM_Task, 'duration_secs': 0.312132} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.744439] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.745268] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d448ac-0144-4cff-ab5f-6e30d8e94ae8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.754097] env[61905]: INFO nova.compute.manager [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Took 29.39 seconds to build instance. 
[ 849.775863] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7b9b8a0c-53aa-4d42-b121-897513ee6814 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 144.148s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.776134] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dc4b886-69dc-4c35-9b97-fd723b06759e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.792750] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 849.792750] env[61905]: value = "task-1362458" [ 849.792750] env[61905]: _type = "Task" [ 849.792750] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.801048] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362458, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.831139] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362457, 'name': Rename_Task, 'duration_secs': 0.17043} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.831425] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.831680] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bda0698e-4476-4d5b-9bfe-18258daecaa1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.837521] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 849.837521] env[61905]: value = "task-1362459" [ 849.837521] env[61905]: _type = "Task" [ 849.837521] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.844940] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362459, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.952972] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Successfully created port: 91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.153614] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 850.294157] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 850.308331] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362458, 'name': ReconfigVM_Task, 'duration_secs': 0.153163} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.312931] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.315229] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a538df5-1672-4126-94df-55236bb02f6f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.320830] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 850.320830] env[61905]: value = "task-1362460" [ 850.320830] env[61905]: _type = "Task" [ 850.320830] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.332377] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.353883] env[61905]: DEBUG oslo_vmware.api [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362459, 'name': PowerOnVM_Task, 'duration_secs': 0.443097} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.356825] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.357051] env[61905]: INFO nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Took 8.84 seconds to spawn the instance on the hypervisor. [ 850.357245] env[61905]: DEBUG nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 850.358328] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f35b6b3-ace0-47c8-8433-27c6de6762bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.463042] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e7c865-e803-43af-a026-60c8cf05655f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.471296] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40ac68e-0963-47a3-9be7-4f8f7fd3e139 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.505230] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100888aa-8340-4753-bb9a-ca3d07c20684 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.507391] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "60e68738-a333-44b2-a1e8-0b3da728059e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.507635] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.507835] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.508079] env[61905]: 
DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.508323] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.510654] env[61905]: INFO nova.compute.manager [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Terminating instance [ 850.514739] env[61905]: DEBUG nova.compute.manager [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 850.514934] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.516245] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee610323-a562-42d2-b5ef-ae39aea2b100 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.520988] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c888b421-4a1c-4ba9-8cc7-8d12c5031e9d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.536028] env[61905]: DEBUG nova.compute.provider_tree [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.537986] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.538503] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1696633-5b9e-40dd-b375-2d85979c156d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.544190] env[61905]: DEBUG oslo_vmware.api [None 
req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 850.544190] env[61905]: value = "task-1362461" [ 850.544190] env[61905]: _type = "Task" [ 850.544190] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.552404] env[61905]: DEBUG oslo_vmware.api [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.817597] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.830795] env[61905]: DEBUG oslo_vmware.api [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362460, 'name': PowerOnVM_Task, 'duration_secs': 0.466394} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.831380] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.834036] env[61905]: DEBUG nova.compute.manager [None req-824a902b-e5c9-42a5-9fd5-31d23e1cbb98 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 850.834780] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893296c6-317f-4c63-9732-4b97571da7e5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.874658] env[61905]: INFO nova.compute.manager [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Took 28.60 seconds to build instance. 
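The "Waiting for the task: (returnval){ value = "task-..." }" blocks and the "progress is N% ... completed successfully" lines above are oslo.vmware's task-polling loop. A sketch of that client pattern, with placeholder connection details (the host, credentials, and vm_ref below are stand-ins, not values from this log):

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    vm_ref = ...  # a VirtualMachine managed-object reference

    # invoke_api() issues PowerOnVM_Task and returns a task moref;
    # wait_for_task() polls it (the "progress is N%" DEBUG lines)
    # until the task reaches 'success', or raises on 'error'.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)  # 'success', matching "completed successfully"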
[ 851.040093] env[61905]: DEBUG nova.scheduler.client.report [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.055292] env[61905]: DEBUG oslo_vmware.api [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362461, 'name': PowerOffVM_Task, 'duration_secs': 0.296917} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.056110] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.056293] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.056676] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b88cc3b-0eac-43b5-9eed-2b6686ca7e66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.168954] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 851.176991] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 851.177259] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 851.177500] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleting the datastore file [datastore2] 60e68738-a333-44b2-a1e8-0b3da728059e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.177800] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ecc5b07-98e3-48ba-8d69-13d7b9097024 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.184437] env[61905]: DEBUG oslo_vmware.api [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for the task: (returnval){ [ 851.184437] env[61905]: value = "task-1362463" [ 851.184437] env[61905]: _type = "Task" [ 851.184437] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.194614] env[61905]: DEBUG oslo_vmware.api [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362463, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.196677] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.196895] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.197060] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.197245] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.197385] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.197528] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.197730] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.197889] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.198061] env[61905]: 
DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.198225] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.198392] env[61905]: DEBUG nova.virt.hardware [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.199369] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2af930-90b1-4953-b6fc-4f3a68f6e879 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.206753] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de39f7ca-1bb2-45ed-b3c3-4578315970b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.375901] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9188a036-3b1f-4a74-98f6-ce0d56bd7a61 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.496s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.545504] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.547650] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 851.548869] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.537s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.550632] env[61905]: INFO nova.compute.claims [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.690572] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Successfully updated port: 91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.697794] env[61905]: DEBUG oslo_vmware.api [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Task: {'id': task-1362463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225398} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.698064] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.698250] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.698421] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.698650] env[61905]: INFO nova.compute.manager [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 851.698894] env[61905]: DEBUG oslo.service.loopingcall [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.699324] env[61905]: DEBUG nova.compute.manager [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.699408] env[61905]: DEBUG nova.network.neutron [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 851.732381] env[61905]: DEBUG nova.compute.manager [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Received event network-vif-plugged-91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.732586] env[61905]: DEBUG oslo_concurrency.lockutils [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] Acquiring lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.733268] env[61905]: DEBUG oslo_concurrency.lockutils [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.733445] env[61905]: DEBUG oslo_concurrency.lockutils [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.733609] env[61905]: DEBUG nova.compute.manager [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] No waiting events found dispatching network-vif-plugged-91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 851.733768] env[61905]: WARNING nova.compute.manager [req-56a8dc5b-43b0-46d0-bf24-36ac7224f2ff req-86ed7989-d2b5-4ac3-83a7-ef707a1b3fe3 service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Received unexpected event network-vif-plugged-91bfd878-6423-4f5a-9645-1fb1d70de825 for instance with vm_state building and task_state spawning. 
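The "Waiting for function ... _deallocate_network_with_retries to return" line above comes from oslo.service's loopingcall helper: the worker function is invoked on an interval and stops the loop by raising LoopingCallDone. A sketch of that retry shape (illustrative only; the retry budget and the neutron call are stand-ins, not Nova's exact code):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            pass  # deallocate_for_instance() would go here and may raise
        except Exception:
            if attempts['n'] >= 3:
                raise   # give up once the retry budget is spent
            return      # otherwise wait for the next tick
        # Raising LoopingCallDone stops the loop and unblocks wait().
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()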
[ 851.861459] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.861562] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.879339] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 852.007455] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7da205-7f90-486e-89b8-8480e7567922 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.014599] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Suspending the VM {{(pid=61905) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 852.014879] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-30f5ef22-fa50-4c00-b0aa-80a94c492db7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.021156] env[61905]: DEBUG oslo_vmware.api [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 852.021156] env[61905]: value = "task-1362464" [ 852.021156] env[61905]: _type = "Task" [ 852.021156] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.028244] env[61905]: DEBUG oslo_vmware.api [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362464, 'name': SuspendVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.055719] env[61905]: DEBUG nova.compute.utils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.057562] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 852.057779] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.104431] env[61905]: DEBUG nova.policy [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '607efe9546ea4925b40425a536d6ae81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9c343ce0dc043d4b39b04dc6bdc70aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 852.193581] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.193902] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.193902] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.365341] env[61905]: DEBUG nova.compute.utils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 852.387305] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Successfully created port: e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.404940] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.438938] env[61905]: DEBUG nova.network.neutron [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updating 
instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.531504] env[61905]: DEBUG oslo_vmware.api [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362464, 'name': SuspendVM_Task} progress is 62%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.563529] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 852.727520] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.860561] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f56e0bc-2274-43cc-9115-25ca2987b455 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.869216] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a702c-6133-43ad-971b-b6cbf3ce3f6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.873203] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.906908] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdebf96-5394-403c-9a54-0d50890d2baf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.917911] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2d6afb-5254-41bd-93d1-66c74c03770d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.932292] env[61905]: DEBUG nova.compute.provider_tree [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.938014] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "e9e00459-e685-431b-b194-cf426c7a743e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.938341] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.941209] env[61905]: INFO nova.compute.manager [-] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Took 1.24 seconds to deallocate network for instance. [ 852.965378] env[61905]: DEBUG nova.network.neutron [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Updating instance_info_cache with network_info: [{"id": "91bfd878-6423-4f5a-9645-1fb1d70de825", "address": "fa:16:3e:a3:8a:a7", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91bfd878-64", "ovs_interfaceid": "91bfd878-6423-4f5a-9645-1fb1d70de825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.033029] env[61905]: DEBUG oslo_vmware.api [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362464, 'name': SuspendVM_Task, 'duration_secs': 0.658306} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.033284] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Suspended the VM {{(pid=61905) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 853.033464] env[61905]: DEBUG nova.compute.manager [None req-989e0858-8312-447b-9c7a-a104370402fe tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.034255] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5b6220-fb11-41e1-872f-2127206e1934 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.438397] env[61905]: DEBUG nova.scheduler.client.report [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.448170] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.468562] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.468562] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance network_info: |[{"id": "91bfd878-6423-4f5a-9645-1fb1d70de825", "address": "fa:16:3e:a3:8a:a7", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91bfd878-64", "ovs_interfaceid": "91bfd878-6423-4f5a-9645-1fb1d70de825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 853.469386] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:8a:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91bfd878-6423-4f5a-9645-1fb1d70de825', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.476467] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating folder: Project (980cc259c0254e84989e0cfc0e45837f). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.477018] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f329882e-db6e-44a5-9616-8dd7daae4c31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.486918] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created folder: Project (980cc259c0254e84989e0cfc0e45837f) in parent group-v289968. [ 853.487760] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating folder: Instances. Parent ref: group-v290020. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.487760] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99f02859-6de5-4784-921e-529a14bcf4b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.495983] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created folder: Instances in parent group-v290020. [ 853.496156] env[61905]: DEBUG oslo.service.loopingcall [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.496346] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.496547] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1f1c413-f2dd-49b0-8ebc-fedf1589fa5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.519281] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.519281] env[61905]: value = "task-1362467" [ 853.519281] env[61905]: _type = "Task" [ 853.519281] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.526499] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362467, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.576767] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 853.603360] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.603607] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.603763] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.603936] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.604099] env[61905]: DEBUG nova.virt.hardware [None 
req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.604249] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.604450] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.604646] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.604800] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.605053] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.605216] env[61905]: DEBUG nova.virt.hardware [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.606091] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d92704-a379-4c55-811c-085354fe3bfb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.614441] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ab52a4-e80b-42a4-aa35-6abcc08a4746 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.797286] env[61905]: DEBUG nova.compute.manager [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Received event network-changed-91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.797636] env[61905]: DEBUG nova.compute.manager [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Refreshing instance network info cache due to event 
network-changed-91bfd878-6423-4f5a-9645-1fb1d70de825. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.798012] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] Acquiring lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.798286] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] Acquired lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.798610] env[61905]: DEBUG nova.network.neutron [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Refreshing network info cache for port 91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.924114] env[61905]: DEBUG nova.compute.manager [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received event network-vif-plugged-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.924350] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] Acquiring lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.924556] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.924754] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.924939] env[61905]: DEBUG nova.compute.manager [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] No waiting events found dispatching network-vif-plugged-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 853.925365] env[61905]: WARNING nova.compute.manager [req-8c8ae450-4114-4cd9-962e-a76ff82a2c91 req-8d1995e0-35ea-47e1-b9ab-8964e0639c6e service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received unexpected event network-vif-plugged-e1eae77b-b6ce-4fab-9407-143d4f0555ff for instance with vm_state building and task_state spawning. 
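The two blocks above show Nova's external-event plumbing end to end: Neutron reports network-changed/network-vif-plugged, the compute manager serializes handling on a per-instance "<uuid>-events" lock, and an event that arrives before anything is waiting on it is logged as unexpected and dropped. The acquire/release bookkeeping lines all come from oslo.concurrency. A minimal sketch of the two lockutils idioms behind them, using lock names taken from the records above (the function bodies are illustrative only):

    from oslo_concurrency import lockutils

    # Decorator form: emits the "Acquiring lock ... by ..." / "acquired ...
    # waited N.NNNs" / "released ... held N.NNNs" trio seen for the
    # "<uuid>-events" lock (logged from lockutils' inner() wrapper).
    @lockutils.synchronized('9a385d72-ba5d-48e0-b71f-d37d4e63c403-events')
    def _pop_event():
        pass  # runs with the per-instance event lock held

    # Context-manager form: emits the plain "Acquiring lock" / "Acquired
    # lock" / "Releasing lock" records used for the refresh_cache-<uuid>
    # locks around the network info cache refresh.
    with lockutils.lock('refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49'):
        pass  # Nova refreshes the instance network info cache under this lock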
[ 853.936371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.936682] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.936771] env[61905]: INFO nova.compute.manager [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Attaching volume 2f6f8122-2df2-4b93-a09d-7ce9b009fece to /dev/sdb [ 853.945094] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.945575] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 853.948598] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.463s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.951747] env[61905]: INFO nova.compute.claims [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.982106] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999b0dde-37d8-4e3a-a8fe-1712a31b8eb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.991338] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822f3eef-f031-4d6c-bb37-3b4aad1aa969 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.005777] env[61905]: DEBUG nova.virt.block_device [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updating existing volume attachment record: 625cfebc-f5db-43cc-bc11-6bc369f7f10f {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 854.028074] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362467, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.414907] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Successfully updated port: e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.456576] env[61905]: DEBUG nova.compute.utils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.459817] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 854.459981] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 854.510453] env[61905]: DEBUG nova.policy [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49efd26f933749618e9c55ea216836f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56c24892a9e54d398de027367a3640a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 854.529236] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362467, 'name': CreateVM_Task, 'duration_secs': 0.728245} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.529423] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.530131] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.530288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.530603] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.530849] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc42e007-04cf-4138-87a1-308c5b44d3d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.535311] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 854.535311] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5227c9f1-be10-51bf-703d-0a4d632f9300" [ 854.535311] env[61905]: _type = "Task" [ 854.535311] 
env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.543294] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5227c9f1-be10-51bf-703d-0a4d632f9300, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.544065] env[61905]: DEBUG nova.network.neutron [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Updated VIF entry in instance network info cache for port 91bfd878-6423-4f5a-9645-1fb1d70de825. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.544391] env[61905]: DEBUG nova.network.neutron [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Updating instance_info_cache with network_info: [{"id": "91bfd878-6423-4f5a-9645-1fb1d70de825", "address": "fa:16:3e:a3:8a:a7", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91bfd878-64", "ovs_interfaceid": "91bfd878-6423-4f5a-9645-1fb1d70de825", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.744094] env[61905]: DEBUG nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.744861] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba05ed59-d8a6-4abc-8adc-8c8879ac15cb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.790192] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Successfully created port: 94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.923604] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.923604] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.923604] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.961166] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 855.044556] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5227c9f1-be10-51bf-703d-0a4d632f9300, 'name': SearchDatastore_Task, 'duration_secs': 0.009471} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.046866] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.047117] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.047344] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.047489] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.047663] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.048139] env[61905]: DEBUG oslo_concurrency.lockutils [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] Releasing lock "refresh_cache-a6e45dd1-e0ee-4bda-9513-4b1000e15e49" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.048347] env[61905]: DEBUG nova.compute.manager [req-eb1a31c8-3c9b-4e4f-8c5d-ec819a09c301 req-77a97db8-7591-4ac9-b7d7-b8fcd31510ef service nova] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Received event network-vif-deleted-257550cb-7da2-4dee-9d64-19c0c56c22e1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.048865] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ad5d5e3-0572-4f87-bc19-a4bf5d459684 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.056618] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.056792] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.059509] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30023230-da05-4f3f-911f-c6b1cca43a48 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.064449] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 855.064449] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528be877-f1c2-9a48-7f2d-6e2bf6d69c76" [ 855.064449] env[61905]: _type = "Task" [ 855.064449] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.071829] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528be877-f1c2-9a48-7f2d-6e2bf6d69c76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.219976] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22b711e-dc38-4304-95c6-fc72a50d2faa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.227157] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e260b08e-3eca-4623-a1f1-33c106446cc4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.256150] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1970fd9-3236-4561-9aba-c1e371684a32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.259249] env[61905]: INFO nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] instance snapshotting [ 855.259436] env[61905]: WARNING nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 855.262069] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711a3483-6f71-43b1-aa63-cf07dadbfcb2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.267496] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f69ae6f-d4cc-43dd-9958-cfc1e7c2cbc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.285760] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cd178355-843d-457f-9785-6cf96e14bf3d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.295478] env[61905]: DEBUG nova.compute.provider_tree [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.456340] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.575156] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528be877-f1c2-9a48-7f2d-6e2bf6d69c76, 'name': SearchDatastore_Task, 'duration_secs': 0.013325} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.575950] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1694ece-98ca-4c92-8604-37df4ac38122 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.581638] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 855.581638] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5210f340-490f-d465-2239-b6beeea33d5d" [ 855.581638] env[61905]: _type = "Task" [ 855.581638] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.589293] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5210f340-490f-d465-2239-b6beeea33d5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.604012] env[61905]: DEBUG nova.network.neutron [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.798648] env[61905]: DEBUG nova.scheduler.client.report [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.804146] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Creating Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 855.804438] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-65548fa2-d5b2-4b19-9f3b-1a000bcaae5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.812625] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 855.812625] env[61905]: value = "task-1362471" [ 855.812625] env[61905]: _type = "Task" [ 855.812625] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.821173] env[61905]: DEBUG nova.compute.manager [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.821431] env[61905]: DEBUG nova.compute.manager [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing instance network info cache due to event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 855.821751] env[61905]: DEBUG oslo_concurrency.lockutils [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] Acquiring lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.826978] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362471, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.975172] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 855.998074] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.998074] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.998074] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.998074] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.998074] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.998556] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.998556] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.998659] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.998800] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 
tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.998965] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.999155] env[61905]: DEBUG nova.virt.hardware [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.000067] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc8fe68-4dcc-45a8-80e7-db4509200710 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.008198] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18296794-8807-45e7-91cf-3717c20e99a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.092668] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5210f340-490f-d465-2239-b6beeea33d5d, 'name': SearchDatastore_Task, 'duration_secs': 0.010689} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.092921] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.093185] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.093437] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-523cb00b-ae04-4f00-967b-f030182bf418 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.100229] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 856.100229] env[61905]: value = "task-1362472" [ 856.100229] env[61905]: _type = "Task" [ 856.100229] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.109544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.109835] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Instance network_info: |[{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 856.110239] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362472, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.110288] env[61905]: DEBUG oslo_concurrency.lockutils [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] Acquired lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.110452] env[61905]: DEBUG nova.network.neutron [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.111773] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:57:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cde23701-02ca-4cb4-b5a6-d321f8ac9660', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1eae77b-b6ce-4fab-9407-143d4f0555ff', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.119716] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Creating folder: Project (e9c343ce0dc043d4b39b04dc6bdc70aa). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.122524] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e1b6e98-4062-46ee-9d5c-a40997d3c715 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.133366] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Created folder: Project (e9c343ce0dc043d4b39b04dc6bdc70aa) in parent group-v289968. [ 856.133696] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Creating folder: Instances. Parent ref: group-v290025. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.134060] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-22da9d2e-2400-4e06-8d70-9623fd016d52 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.144644] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Created folder: Instances in parent group-v290025. [ 856.145015] env[61905]: DEBUG oslo.service.loopingcall [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.145292] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.145597] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e36ec038-7c41-4fc0-860e-6b2c2b63a3fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.169852] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.169852] env[61905]: value = "task-1362475" [ 856.169852] env[61905]: _type = "Task" [ 856.169852] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.179627] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362475, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.283473] env[61905]: DEBUG nova.compute.manager [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Received event network-vif-plugged-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 856.283773] env[61905]: DEBUG oslo_concurrency.lockutils [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] Acquiring lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.284235] env[61905]: DEBUG oslo_concurrency.lockutils [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.284652] env[61905]: DEBUG oslo_concurrency.lockutils [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.284995] env[61905]: DEBUG nova.compute.manager [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] No waiting events found dispatching network-vif-plugged-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 856.285245] env[61905]: WARNING nova.compute.manager [req-481a968d-a438-4db6-8440-2113dc6c9166 req-ab901e09-5aa3-4062-a5aa-6f837184abd3 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Received unexpected event network-vif-plugged-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 for instance with vm_state building and task_state spawning. 
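Every vCenter operation in this excerpt has the same asynchronous shape: invoke a *_Task method, receive a Task managed-object reference back immediately (the "Waiting for the task: (returnval){ value = "task-..." }" block), then poll it until it succeeds or fails (the "progress is N%" records). A hedged sketch of that pattern with oslo.vmware; the connection parameters are illustrative, and vm_folder, config_spec and res_pool stand in for objects built elsewhere:

    from oslo_vmware import api

    # Session setup mirrors the VMwareAPISession._create_session records at
    # the top of this log; host and credentials here are placeholders.
    session = api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # CreateVM_Task returns at once with a Task reference...
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=res_pool)

    # ...and wait_for_task() polls it, emitting the "progress is N%" lines,
    # until the task reaches 'success' (returning its task info) or raises.
    task_info = session.wait_for_task(task_ref)

The task IDs in the records (task-1362467, task-1362475) are simply the string values of those Task references.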
[ 856.308187] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.308187] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 856.310951] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.922s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.311303] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.313715] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.513s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.314041] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.316577] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.694s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.318780] env[61905]: INFO nova.compute.claims [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.336554] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362471, 'name': CreateSnapshot_Task, 'duration_secs': 0.496299} completed 
successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.337649] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Created Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 856.339240] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b7f900-12ba-4161-8dc8-dcef8c663d3c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.354388] env[61905]: INFO nova.scheduler.client.report [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Deleted allocations for instance d31570f0-7662-4e13-9dee-51dc66728acc [ 856.355792] env[61905]: INFO nova.scheduler.client.report [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Deleted allocations for instance fed05097-de84-4617-bf9e-7fc116ebc56e [ 856.445065] env[61905]: DEBUG nova.network.neutron [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updated VIF entry in instance network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.445509] env[61905]: DEBUG nova.network.neutron [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.610074] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362472, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.683553] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362475, 'name': CreateVM_Task, 'duration_secs': 0.351998} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.683553] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.683553] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.684028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.684168] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 856.684425] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd30669b-0e9a-4fde-bedc-266a0d8e5d43 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.689676] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 856.689676] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5250e7a9-40d3-e151-b3d8-6e8eab19b127" [ 856.689676] env[61905]: _type = "Task" [ 856.689676] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.701603] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5250e7a9-40d3-e151-b3d8-6e8eab19b127, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.826947] env[61905]: DEBUG nova.compute.utils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.832870] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 856.833083] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 856.861304] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Creating linked-clone VM from snapshot {{(pid=61905) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 856.862625] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b660b895-a579-4764-a7a6-bf1bb6ffa2fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.874101] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b97b524f-fae9-4d6d-9661-94edbbbee4e8 tempest-ServersTestMultiNic-2015078016 tempest-ServersTestMultiNic-2015078016-project-member] Lock "d31570f0-7662-4e13-9dee-51dc66728acc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.717s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.874101] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d49d2392-befb-4ce7-8b77-ef3f63563685 tempest-VolumesAdminNegativeTest-363631365 tempest-VolumesAdminNegativeTest-363631365-project-member] Lock "fed05097-de84-4617-bf9e-7fc116ebc56e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.172s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.878785] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 856.878785] env[61905]: value = "task-1362477" [ 856.878785] env[61905]: _type = "Task" [ 856.878785] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.889778] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362477, 'name': CloneVM_Task} progress is 0%. 
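
_create_linked_clone_from_snapshot above issues a CloneVM_Task against the snapshot it just created, producing a child-disk clone instead of a full copy of the disks. Roughly the same SOAP calls expressed with pyVmomi, as an illustration only (the log itself goes through suds/oslo.vmware):

from pyVmomi import vim

def linked_clone(vm, folder, name):
    # Clone from the VM's current snapshot, sharing its base disks.
    reloc = vim.vm.RelocateSpec(
        # Child-disk backing is what makes this a linked clone: new
        # deltas are written on top of the snapshot's read-only disk.
        diskMoveType='createNewChildDiskBacking')
    spec = vim.vm.CloneSpec(
        location=reloc,
        snapshot=vm.snapshot.currentSnapshot,  # assumes a snapshot exists
        powerOn=False)
    return vm.CloneVM_Task(folder=folder, name=name, spec=spec)
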
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.925090] env[61905]: DEBUG nova.policy [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bfa0b4bb2824d01b2fb40f690f38037', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad70dc5f83f24efe8ddd740d942f6362', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 856.949449] env[61905]: DEBUG oslo_concurrency.lockutils [req-d1a813c2-b09c-44ae-bb1f-4f783f5773e4 req-65226f35-f0c7-4b3b-846d-5aee2a51d8f2 service nova] Releasing lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.023665] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Successfully updated port: 94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.110923] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568972} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.111581] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.111794] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.112224] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58cdd44d-ad8c-48e5-9452-e57139d38c80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.119922] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 857.119922] env[61905]: value = "task-1362478" [ 857.119922] env[61905]: _type = "Task" [ 857.119922] env[61905]: } to complete. 
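
The "Policy check for network:attach_external_network failed" record is expected for a regular tempest tenant: the credentials carry only the reader and member roles, and the check runs non-fatally before port creation. A minimal oslo.policy sketch of a soft check like this (the rule string registered here is an assumption for illustration):

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))

creds = {'roles': ['reader', 'member'],
         'project_id': 'ad70dc5f83f24efe8ddd740d942f6362'}

# do_raise=False makes this a soft check: it returns False instead of
# raising PolicyNotAuthorized, matching the DEBUG-level record above.
allowed = enforcer.authorize('network:attach_external_network',
                             target={}, creds=creds, do_raise=False)
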
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.128894] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362478, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.201827] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5250e7a9-40d3-e151-b3d8-6e8eab19b127, 'name': SearchDatastore_Task, 'duration_secs': 0.012787} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.202196] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.202266] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.202792] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.202792] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.203304] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.203304] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7adcaa1-1896-4299-b3dc-90cf51f00d12 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.218435] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.218594] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.219461] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04a97bc6-94dc-4cdb-a957-4f032bcad203 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.225037] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 857.225037] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521458bc-4add-719a-dd3e-cd5691d79991" [ 857.225037] env[61905]: _type = "Task" [ 857.225037] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.233810] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521458bc-4add-719a-dd3e-cd5691d79991, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.285973] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Successfully created port: 6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.337897] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 857.394901] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362477, 'name': CloneVM_Task} progress is 93%. 
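
_fetch_image_if_missing above is the classic cache-or-fetch dance: take the per-image lock, create the cache directory (MakeDirectory is idempotent), search it for the VMDK, and only download from Glance when the search comes back empty. Sketched generically; the datastore object and its methods are stand-ins, not the real vmops API:

def fetch_image_if_missing(datastore, image_id, fetch_from_glance):
    # Ensure <cache>/<image_id>/<image_id>.vmdk exists on the datastore.
    cache_dir = f"devstack-image-cache_base/{image_id}"
    cached_vmdk = f"{cache_dir}/{image_id}.vmdk"

    datastore.mkdir(cache_dir)             # MakeDirectory in the log
    if not datastore.exists(cached_vmdk):  # SearchDatastore_Task in the log
        fetch_from_glance(image_id, cached_vmdk)
    return cached_vmdk
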
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.535593] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.535593] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquired lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.535593] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 857.631364] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066157} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.631364] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.631364] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61280a5f-4c96-44bb-bb7a-bd06cf5505ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.659389] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.659775] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73cca95a-042f-4f69-933e-688fb374c146 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.682835] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 857.682835] env[61905]: value = "task-1362479" [ 857.682835] env[61905]: _type = "Task" [ 857.682835] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.692658] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362479, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.709227] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad69b29c-715e-4998-b33d-c96e0a0cf076 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.716416] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a84780-9a00-48da-ab80-de7bad2a5085 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.751766] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725261aa-d148-48bf-b9e4-ebd2e3599bea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.759880] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521458bc-4add-719a-dd3e-cd5691d79991, 'name': SearchDatastore_Task, 'duration_secs': 0.028223} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.761997] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc466ee0-3434-4c70-abd9-95c070f633b8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.765033] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0179472-62cd-43b6-a7b1-cdb9689f8259 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.779496] env[61905]: DEBUG nova.compute.provider_tree [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.781719] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 857.781719] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd5e7-4aa9-139f-17ef-b35a40d4c9ab" [ 857.781719] env[61905]: _type = "Task" [ 857.781719] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.790366] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd5e7-4aa9-139f-17ef-b35a40d4c9ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.860472] env[61905]: DEBUG nova.compute.manager [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Received event network-changed-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.860681] env[61905]: DEBUG nova.compute.manager [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Refreshing instance network info cache due to event network-changed-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 857.860883] env[61905]: DEBUG oslo_concurrency.lockutils [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] Acquiring lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.890505] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362477, 'name': CloneVM_Task} progress is 94%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.087869] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 858.192843] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362479, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.283889] env[61905]: DEBUG nova.scheduler.client.report [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 858.296900] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd5e7-4aa9-139f-17ef-b35a40d4c9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.018835} completed successfully. 
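
The inventory record above is enough to reconstruct what placement considers schedulable on this node: capacity per resource class is (total - reserved) * allocation_ratio, which for the numbers shown works out to 192 VCPU, 196078 MB of RAM, and 400 GB of disk:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
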
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.297200] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.297521] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 9a385d72-ba5d-48e0-b71f-d37d4e63c403/9a385d72-ba5d-48e0-b71f-d37d4e63c403.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.297742] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d7d0b00-e180-4f81-9c00-0b6824c28336 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.305031] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 858.305031] env[61905]: value = "task-1362480" [ 858.305031] env[61905]: _type = "Task" [ 858.305031] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.315545] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.348090] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 858.371959] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 858.372230] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 858.372382] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.372559] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 858.372704] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.372849] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 858.373057] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 858.373215] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 
tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 858.373376] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 858.373531] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 858.373744] env[61905]: DEBUG nova.virt.hardware [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 858.374563] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c945ccbc-8f54-479f-8656-49e3a47abe36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.386061] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19a3e9f-878e-4ed7-8693-52f3e06897b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.395773] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362477, 'name': CloneVM_Task} progress is 94%. 
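
The hardware.py records above walk Nova's topology search for a 1-vCPU flavor with no limits set, so sockets, cores, and threads each default to a 65536 ceiling, and the only factorization of one vCPU is 1:1:1. A toy version of that enumeration (a simplification of the real algorithm, which also orders results by preference):

from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) triple whose product is vcpus.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield Topology(s, c, t)

print(list(possible_topologies(1)))
# [Topology(sockets=1, cores=1, threads=1)]  -- the "1 possible
# topologies" result logged above
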
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.484033] env[61905]: DEBUG nova.network.neutron [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Updating instance_info_cache with network_info: [{"id": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "address": "fa:16:3e:08:45:15", "network": {"id": "b2f2974e-650e-412b-bf54-36b0fb7a0f56", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1427823786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56c24892a9e54d398de027367a3640a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d96dc3-3d", "ovs_interfaceid": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.558475] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Volume attach. 
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 858.559047] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290024', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'name': 'volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a9ac365e-2be1-438d-a514-6fa7b26fa10c', 'attached_at': '', 'detached_at': '', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'serial': '2f6f8122-2df2-4b93-a09d-7ce9b009fece'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 858.559804] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc3599c-239f-4d77-9842-ee193a6c1912 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.584437] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5508367-cca5-4c10-8644-d10f3a6d36f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.610844] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece/volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.611265] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c017069c-2eaa-4e0d-a60e-1b36e03a4af2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.629624] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 858.629624] env[61905]: value = "task-1362481" [ 858.629624] env[61905]: _type = "Task" [ 858.629624] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.642577] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362481, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.701543] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362479, 'name': ReconfigVM_Task, 'duration_secs': 0.53097} completed successfully. 
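
Attaching the Cinder volume above is just another ReconfigVM_Task: the connection info names the backing VMDK (volume-2f6f8122-...vmdk) and the driver adds a disk device to the config spec with thin provisioning, matching "type thin" in the log. A hedged pyVmomi illustration of that device-add; controller and unit selection are simplified:

from pyVmomi import vim

def attach_vmdk(vm, controller_key, unit_number, vmdk_path):
    backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
        fileName=vmdk_path,    # e.g. '[datastore1] volume-.../volume-....vmdk'
        diskMode='persistent',
        thinProvisioned=True)  # "type thin" in the record above
    disk = vim.vm.device.VirtualDisk(
        key=-101,              # temporary negative key for a device being added
        backing=backing,
        controllerKey=controller_key,
        unitNumber=unit_number)
    spec = vim.vm.ConfigSpec(deviceChange=[
        vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.add,
            device=disk)])
    return vm.ReconfigVM_Task(spec=spec)
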
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.702018] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Reconfigured VM instance instance-00000044 to attach disk [datastore1] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.702657] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a6db51d-10fc-49c6-aa2c-78b3af11bdc1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.710980] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 858.710980] env[61905]: value = "task-1362482" [ 858.710980] env[61905]: _type = "Task" [ 858.710980] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.722972] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362482, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.793171] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.793842] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 858.801091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.067s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.801091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.801091] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.960s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.802326] env[61905]: INFO nova.compute.claims [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.819388] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362480, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.834436] env[61905]: INFO nova.scheduler.client.report [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Deleted allocations for instance ba3a1e36-a9f8-4482-908e-9c949c6f42ec [ 858.896641] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362477, 'name': CloneVM_Task, 'duration_secs': 1.897275} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.897529] env[61905]: INFO nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Created linked-clone VM from snapshot [ 858.898312] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a98963-c811-4a93-ae83-ec750fa37e0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.906710] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Uploading image aef12d3a-bf73-4159-9d46-dd0a3ceba133 {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 858.930283] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 858.930283] env[61905]: value = "vm-290029" [ 858.930283] env[61905]: _type = "VirtualMachine" [ 858.930283] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 858.930579] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88f3f276-8650-420d-986e-25f7a1c317f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.938729] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease: (returnval){ [ 858.938729] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a393a-e8b9-4e5d-16bb-1f24ecdc7966" [ 858.938729] env[61905]: _type = "HttpNfcLease" [ 858.938729] env[61905]: } obtained for exporting VM: (result){ [ 858.938729] env[61905]: value = "vm-290029" [ 858.938729] env[61905]: _type = "VirtualMachine" [ 858.938729] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 858.939247] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the lease: (returnval){ [ 858.939247] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a393a-e8b9-4e5d-16bb-1f24ecdc7966" [ 858.939247] env[61905]: _type = "HttpNfcLease" [ 858.939247] env[61905]: } to be ready. {{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 858.950649] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 858.950649] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a393a-e8b9-4e5d-16bb-1f24ecdc7966" [ 858.950649] env[61905]: _type = "HttpNfcLease" [ 858.950649] env[61905]: } is initializing. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 858.988096] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Releasing lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.988471] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Instance network_info: |[{"id": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "address": "fa:16:3e:08:45:15", "network": {"id": "b2f2974e-650e-412b-bf54-36b0fb7a0f56", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1427823786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56c24892a9e54d398de027367a3640a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d96dc3-3d", "ovs_interfaceid": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 858.988815] env[61905]: DEBUG oslo_concurrency.lockutils [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] Acquired lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.988993] env[61905]: DEBUG nova.network.neutron [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Refreshing network info cache for port 94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 858.990343] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:45:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99be9a5e-b3f9-4e6c-83d5-df11f817847d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.998400] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] 
Creating folder: Project (56c24892a9e54d398de027367a3640a7). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 858.998830] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24409d49-d793-4901-81f3-06250b262a9f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.010710] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Created folder: Project (56c24892a9e54d398de027367a3640a7) in parent group-v289968. [ 859.010971] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Creating folder: Instances. Parent ref: group-v290030. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 859.011522] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a59b2e16-9679-4a7e-af4f-948a0f03431a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.022971] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Created folder: Instances in parent group-v290030. [ 859.023237] env[61905]: DEBUG oslo.service.loopingcall [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.023444] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.023724] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3258a399-6234-4a13-b09f-fcf1d2b2d30a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.053799] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 859.053799] env[61905]: value = "task-1362486" [ 859.053799] env[61905]: _type = "Task" [ 859.053799] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.066260] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362486, 'name': CreateVM_Task} progress is 0%. 
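
The two CreateFolder calls above build the "Project (<tenant>)/Instances" hierarchy on demand; the _create_folder_if_missing helper seen earlier reuses an existing folder rather than failing. A generic idempotent version of that pattern in pyVmomi, assuming DuplicateName as the already-exists signal:

from pyVmomi import vim

def ensure_folder(parent, name):
    # Create `name` under `parent`, or return the existing child folder.
    try:
        return parent.CreateFolder(name=name)
    except vim.fault.DuplicateName:
        for child in parent.childEntity:
            if isinstance(child, vim.Folder) and child.name == name:
                return child
        raise
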
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.068096] env[61905]: DEBUG nova.compute.manager [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Received event network-vif-plugged-6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.068096] env[61905]: DEBUG oslo_concurrency.lockutils [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] Acquiring lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.068096] env[61905]: DEBUG oslo_concurrency.lockutils [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.068096] env[61905]: DEBUG oslo_concurrency.lockutils [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.068324] env[61905]: DEBUG nova.compute.manager [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] No waiting events found dispatching network-vif-plugged-6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 859.068591] env[61905]: WARNING nova.compute.manager [req-5947b9d8-c0fa-4498-a618-a2c847477cb5 req-61ef02d6-02fe-4885-abcd-94ad725eaf58 service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Received unexpected event network-vif-plugged-6fcd65df-60b2-46d9-bc62-81c3b64ca522 for instance with vm_state building and task_state spawning. [ 859.143061] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362481, 'name': ReconfigVM_Task, 'duration_secs': 0.490983} completed successfully. 
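
The WARNING at the end of that block is benign: Neutron delivered network-vif-plugged before the compute manager registered a waiter for it, so pop_instance_event finds nothing to dispatch. The underlying pattern is a per-instance event table guarded by a lock; a condensed sketch with illustrative names, not Nova's exact internals:

import threading

class InstanceEvents:
    # Map (instance_uuid, event-tag) -> waiter, guarded by a lock.

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # e.g. {('020f97b7-...', 'network-vif-plugged-6fcd...'): Event}

    def prepare(self, instance_uuid, tag):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, tag)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, tag):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, tag), None)
        if ev is None:
            # "No waiting events found dispatching ..." in the log
            return False
        ev.set()
        return True
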
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.143061] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfigured VM instance instance-00000042 to attach disk [datastore1] volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece/volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.149252] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-010acd44-9fac-4753-88cd-6f055a49805c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.165846] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 859.165846] env[61905]: value = "task-1362487" [ 859.165846] env[61905]: _type = "Task" [ 859.165846] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.174270] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362487, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.221936] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362482, 'name': Rename_Task, 'duration_secs': 0.277579} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.221936] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.223739] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da239ff6-adb6-4b9f-9776-607185862ce3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.228761] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 859.228761] env[61905]: value = "task-1362488" [ 859.228761] env[61905]: _type = "Task" [ 859.228761] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.239024] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362488, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.287485] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Successfully updated port: 6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.309950] env[61905]: DEBUG nova.compute.utils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 859.317335] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 859.317335] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.326172] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362480, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567147} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.326498] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 9a385d72-ba5d-48e0-b71f-d37d4e63c403/9a385d72-ba5d-48e0-b71f-d37d4e63c403.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.326634] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.326897] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8dc6c5ab-744e-48da-9462-79cf17ac21a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.334565] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 859.334565] env[61905]: value = "task-1362489" [ 859.334565] env[61905]: _type = "Task" [ 859.334565] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.344071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-25df1fee-885c-41f8-a787-e6389b6ae690 tempest-InstanceActionsNegativeTestJSON-2067407335 tempest-InstanceActionsNegativeTestJSON-2067407335-project-member] Lock "ba3a1e36-a9f8-4482-908e-9c949c6f42ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.738s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.350142] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362489, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.413017] env[61905]: DEBUG nova.policy [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 859.449205] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.449205] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a393a-e8b9-4e5d-16bb-1f24ecdc7966" [ 859.449205] env[61905]: _type = "HttpNfcLease" [ 859.449205] env[61905]: } is ready. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 859.449487] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 859.449487] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a393a-e8b9-4e5d-16bb-1f24ecdc7966" [ 859.449487] env[61905]: _type = "HttpNfcLease" [ 859.449487] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 859.450223] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddc5456-762b-494c-966d-f7c0856cbc2c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.459418] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk from lease info. 
{{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 859.459596] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk for reading. {{(pid=61905) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.564404] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362486, 'name': CreateVM_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.571860] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-308b9c38-121b-4931-a328-c02c787ca2bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.679044] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362487, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.742019] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362488, 'name': PowerOnVM_Task} progress is 76%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.794477] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.794477] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquired lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.794477] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 859.813675] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 859.845011] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.362808} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.845445] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.846334] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7b8630-2afa-478f-b14a-db3788fafb66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.870909] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 9a385d72-ba5d-48e0-b71f-d37d4e63c403/9a385d72-ba5d-48e0-b71f-d37d4e63c403.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.874469] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27c25b88-0d8c-401d-bb34-167bf6011855 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.896226] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 859.896226] env[61905]: value = "task-1362490" [ 859.896226] env[61905]: _type = "Task" [ 859.896226] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.908505] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362490, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.070898] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362486, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.133287] env[61905]: DEBUG nova.network.neutron [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Updated VIF entry in instance network info cache for port 94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 860.134185] env[61905]: DEBUG nova.network.neutron [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Updating instance_info_cache with network_info: [{"id": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "address": "fa:16:3e:08:45:15", "network": {"id": "b2f2974e-650e-412b-bf54-36b0fb7a0f56", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1427823786-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56c24892a9e54d398de027367a3640a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99be9a5e-b3f9-4e6c-83d5-df11f817847d", "external-id": "nsx-vlan-transportzone-566", "segmentation_id": 566, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94d96dc3-3d", "ovs_interfaceid": "94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.178513] env[61905]: DEBUG oslo_vmware.api [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362487, 'name': ReconfigVM_Task, 'duration_secs': 0.541292} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.178921] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290024', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'name': 'volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a9ac365e-2be1-438d-a514-6fa7b26fa10c', 'attached_at': '', 'detached_at': '', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'serial': '2f6f8122-2df2-4b93-a09d-7ce9b009fece'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 860.199970] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ded5d6-d204-49ea-959d-894b95a2e3ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.209397] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cad116-3e9d-4e2e-bdbd-e538daa3a570 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.246768] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ad7fd2-61c1-463a-8385-2232ac5bd447 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.261211] env[61905]: DEBUG oslo_vmware.api [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362488, 'name': PowerOnVM_Task, 'duration_secs': 0.92732} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.262794] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a123cee0-3aff-43da-aabb-7460c36f5f60 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.267559] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.267765] env[61905]: INFO nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Took 9.10 seconds to spawn the instance on the hypervisor. 
[ 860.267985] env[61905]: DEBUG nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 860.269025] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9497bba3-e8d8-4661-bb6f-22a6160a0b4c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.287758] env[61905]: DEBUG nova.compute.provider_tree [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 860.290550] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Successfully created port: 63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 860.360911] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 860.406574] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362490, 'name': ReconfigVM_Task, 'duration_secs': 0.498134} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 860.406747] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 9a385d72-ba5d-48e0-b71f-d37d4e63c403/9a385d72-ba5d-48e0-b71f-d37d4e63c403.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 860.407693] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddc0db5c-72f0-4a71-9642-30f9d1f396fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.413758] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){
[ 860.413758] env[61905]: value = "task-1362491"
[ 860.413758] env[61905]: _type = "Task"
[ 860.413758] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 860.428105] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362491, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 860.566193] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362486, 'name': CreateVM_Task, 'duration_secs': 1.031452} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 860.566384] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 860.567352] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 860.567496] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 860.568049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 860.568331] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c1c72c5-29e2-4b80-a97d-ff541974dbfb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.578753] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){
[ 860.578753] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5280743f-2254-dce0-cd84-0fe505a762f0"
[ 860.578753] env[61905]: _type = "Task"
[ 860.578753] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 860.592038] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5280743f-2254-dce0-cd84-0fe505a762f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 860.637398] env[61905]: DEBUG oslo_concurrency.lockutils [req-b04315c5-1962-4256-a586-8e6552203886 req-81644ff4-fc02-4ba5-9cba-ac1cd79da696 service nova] Releasing lock "refresh_cache-4bb7a2df-b472-4f6d-8a01-a55d0b86efda" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 860.679338] env[61905]: DEBUG nova.network.neutron [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Updating instance_info_cache with network_info: [{"id": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "address": "fa:16:3e:f9:68:20", "network": {"id": "1a5a610d-a8f9-44e9-b4d6-de59861ea2a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1733034426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad70dc5f83f24efe8ddd740d942f6362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fcd65df-60", "ovs_interfaceid": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 860.796728] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 860.814023] env[61905]: INFO nova.compute.manager [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Took 37.58 seconds to build instance.
[ 860.824677] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 860.860119] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 860.860604] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 860.861420] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 860.861420] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 860.861591] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 860.861733] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 860.862202] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 860.862340] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 860.862668] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 860.862924] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 860.862995] env[61905]: DEBUG nova.virt.hardware [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 860.864282] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20616418-59ab-4f33-8a00-cd59a80c8b5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.874983] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9451e8-4b92-46da-8b48-a732e6b7d032 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.924915] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362491, 'name': Rename_Task, 'duration_secs': 0.235249} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 860.925326] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 860.925614] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad2ac935-c0d1-4f75-8245-db0fca2284ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 860.934610] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){
[ 860.934610] env[61905]: value = "task-1362492"
[ 860.934610] env[61905]: _type = "Task"
[ 860.934610] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 860.945638] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.089771] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5280743f-2254-dce0-cd84-0fe505a762f0, 'name': SearchDatastore_Task, 'duration_secs': 0.020995} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 861.090523] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 861.090659] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 861.090915] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 861.092344] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 861.092344] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 861.093041] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-963760be-11b8-41c1-8ee2-7c0f83f67d15 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.104548] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 861.104548] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
[ 861.107553] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-414be1ad-e0d0-4cb7-b119-20056ed5dcfd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.115789] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){
[ 861.115789] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206bb7d-c528-ad81-3266-b55ac5a9034d"
[ 861.115789] env[61905]: _type = "Task"
[ 861.115789] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.126235] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206bb7d-c528-ad81-3266-b55ac5a9034d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.172841] env[61905]: DEBUG nova.compute.manager [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Received event network-changed-6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 861.172841] env[61905]: DEBUG nova.compute.manager [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Refreshing instance network info cache due to event network-changed-6fcd65df-60b2-46d9-bc62-81c3b64ca522. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 861.172841] env[61905]: DEBUG oslo_concurrency.lockutils [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] Acquiring lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 861.181817] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Releasing lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 861.182152] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Instance network_info: |[{"id": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "address": "fa:16:3e:f9:68:20", "network": {"id": "1a5a610d-a8f9-44e9-b4d6-de59861ea2a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1733034426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad70dc5f83f24efe8ddd740d942f6362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fcd65df-60", "ovs_interfaceid": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}}
[ 861.182439] env[61905]: DEBUG oslo_concurrency.lockutils [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] Acquired lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 861.182612] env[61905]: DEBUG nova.network.neutron [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Refreshing network info cache for port 6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 861.183818] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:68:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '34a581cb-6d33-4e2e-af50-735a6749d6da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fcd65df-60b2-46d9-bc62-81c3b64ca522', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 861.193734] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Creating folder: Project (ad70dc5f83f24efe8ddd740d942f6362). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 861.196133] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b822f88-cd97-468d-9ebf-55a32ebbf490 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.212203] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Created folder: Project (ad70dc5f83f24efe8ddd740d942f6362) in parent group-v289968.
[ 861.212411] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Creating folder: Instances. Parent ref: group-v290033. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}}
[ 861.212877] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d70b6725-58da-426d-b30d-c87976ee6c9a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.222645] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Created folder: Instances in parent group-v290033.
[ 861.222903] env[61905]: DEBUG oslo.service.loopingcall [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 861.223121] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}}
[ 861.223345] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85694bef-2001-40bc-b285-f8cc7dc15d97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.238842] env[61905]: DEBUG nova.objects.instance [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'flavor' on Instance uuid a9ac365e-2be1-438d-a514-6fa7b26fa10c {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 861.246156] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 861.246156] env[61905]: value = "task-1362495"
[ 861.246156] env[61905]: _type = "Task"
[ 861.246156] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.254380] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362495, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.307998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 861.310780] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}}
[ 861.312823] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.969s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 861.316332] env[61905]: INFO nova.compute.claims [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 861.318324] env[61905]: DEBUG oslo_concurrency.lockutils [None req-316d7155-dfb2-4636-8900-c38669ceefe0 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 133.887s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 861.397601] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 861.449294] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362492, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.628642] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206bb7d-c528-ad81-3266-b55ac5a9034d, 'name': SearchDatastore_Task, 'duration_secs': 0.011888} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 861.629760] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cd7e604-d90f-44dc-8bb4-5fb62e5efa7c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.635562] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){
[ 861.635562] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5211d095-2107-2a23-cc1f-60d445df0a4a"
[ 861.635562] env[61905]: _type = "Task"
[ 861.635562] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.644679] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5211d095-2107-2a23-cc1f-60d445df0a4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.754020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9d30dccd-a218-4a97-a8f1-c698baa5b5a3 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.815s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 861.754020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.355s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 861.754020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 861.754020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 861.754020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 861.756852] env[61905]: INFO nova.compute.manager [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Terminating instance
[ 861.761473] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362495, 'name': CreateVM_Task, 'duration_secs': 0.453071} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 861.764680] env[61905]: DEBUG nova.compute.manager [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 861.764935] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 861.765161] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}}
[ 861.765410] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c033810d-311d-4e59-8e47-ae9da4744c20 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.768162] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 861.768329] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 861.768670] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 861.769524] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c550488-1d60-4772-a6dc-809573dbb3f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.776122] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){
[ 861.776122] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52125bcc-239c-a020-62d9-e9d22d6ca188"
[ 861.776122] env[61905]: _type = "Task"
[ 861.776122] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.777138] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){
[ 861.777138] env[61905]: value = "task-1362496"
[ 861.777138] env[61905]: _type = "Task"
[ 861.777138] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 861.790155] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52125bcc-239c-a020-62d9-e9d22d6ca188, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.793609] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 861.819968] env[61905]: DEBUG nova.compute.utils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 861.823153] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 861.823369] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 861.828858] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}}
[ 861.953288] env[61905]: DEBUG oslo_vmware.api [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362492, 'name': PowerOnVM_Task, 'duration_secs': 0.588742} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 861.953656] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 861.956306] env[61905]: INFO nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Took 8.38 seconds to spawn the instance on the hypervisor.
[ 861.956306] env[61905]: DEBUG nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 861.956306] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4f9cc6-4586-457f-887e-4840509ca048 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 861.966697] env[61905]: DEBUG nova.policy [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cfd818932a44306bec0838cb58bf483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69666592007841459c3f8f9836ef4d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 862.146741] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5211d095-2107-2a23-cc1f-60d445df0a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.039308} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.147214] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.147302] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 4bb7a2df-b472-4f6d-8a01-a55d0b86efda/4bb7a2df-b472-4f6d-8a01-a55d0b86efda.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.147552] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b7cb505-de45-494f-b05d-8156ac280428 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.155740] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 862.155740] env[61905]: value = "task-1362497" [ 862.155740] env[61905]: _type = "Task" [ 862.155740] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.165993] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362497, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.289052] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52125bcc-239c-a020-62d9-e9d22d6ca188, 'name': SearchDatastore_Task, 'duration_secs': 0.019274} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.289796] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.290072] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.290320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.290472] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.290652] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 862.291067] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c98fdf4-8e1d-4254-b3c0-4c008bae0539 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.296084] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362496, 'name': PowerOffVM_Task, 'duration_secs': 0.288711} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.297075] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.297075] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 862.297245] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290024', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'name': 'volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a9ac365e-2be1-438d-a514-6fa7b26fa10c', 'attached_at': '', 'detached_at': '', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'serial': '2f6f8122-2df2-4b93-a09d-7ce9b009fece'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 862.298017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304be0c5-ebdd-4ab0-be48-17bf206ed559 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.303900] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 862.304138] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 862.326653] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2819d85-228f-4bd0-b5b9-5e3a1856849b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.331890] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19bf3aaf-db2d-4cae-bc02-4ee8b21b0d64 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.336337] env[61905]: DEBUG nova.network.neutron [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Updated VIF entry in instance network info cache for port 6fcd65df-60b2-46d9-bc62-81c3b64ca522. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.336727] env[61905]: DEBUG nova.network.neutron [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Updating instance_info_cache with network_info: [{"id": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "address": "fa:16:3e:f9:68:20", "network": {"id": "1a5a610d-a8f9-44e9-b4d6-de59861ea2a6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1733034426-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad70dc5f83f24efe8ddd740d942f6362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "34a581cb-6d33-4e2e-af50-735a6749d6da", "external-id": "nsx-vlan-transportzone-673", "segmentation_id": 673, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fcd65df-60", "ovs_interfaceid": "6fcd65df-60b2-46d9-bc62-81c3b64ca522", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.343184] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 862.353287] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 862.353287] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52629047-07e0-631f-54b3-1130d00384fb" [ 862.353287] env[61905]: _type = "Task" [ 862.353287] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.359743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be63883-6693-4d23-8f84-5939765dd92a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.368740] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52629047-07e0-631f-54b3-1130d00384fb, 'name': SearchDatastore_Task, 'duration_secs': 0.019845} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.369825] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b13f8ee-e432-4b63-a192-7d303a730db6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.391532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.391532] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 862.394399] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b29449d-dc83-466c-9430-7c435edf546b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.398659] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 862.398659] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ce6163-09bc-ef8c-9f51-056f82b2ebd7" [ 862.398659] env[61905]: _type = "Task" [ 862.398659] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.416193] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] The volume has not been displaced from its original location: [datastore1] volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece/volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece.vmdk. No consolidation needed. 
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 862.421441] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfiguring VM instance instance-00000042 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 862.422769] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 862.422987] env[61905]: DEBUG nova.compute.provider_tree [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.425836] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-636ffe5b-1b52-4a9e-a60e-30a178a5e4d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.443430] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ce6163-09bc-ef8c-9f51-056f82b2ebd7, 'name': SearchDatastore_Task, 'duration_secs': 0.015266} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.444124] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.444396] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e/020f97b7-e3e4-44e1-9ad2-97e3ed671f7e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.444672] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fecd599e-b89b-4492-ab44-d3e04db72a59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.449923] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 862.449923] env[61905]: value = "task-1362498" [ 862.449923] env[61905]: _type = "Task" [ 862.449923] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.454992] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 862.454992] env[61905]: value = "task-1362499" [ 862.454992] env[61905]: _type = "Task" [ 862.454992] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.455913] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 862.465801] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362498, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.477763] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362499, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.478283] env[61905]: INFO nova.compute.manager [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Took 29.37 seconds to build instance. [ 862.484506] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 862.671028] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362497, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.761602] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Successfully created port: 76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.832644] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b22eba-fd4f-4620-9f68-8450449656bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.841252] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fc5b82-610c-4779-9c4b-5067c4f7f716 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.850791] env[61905]: DEBUG oslo_concurrency.lockutils [req-f0204612-908a-4e96-a207-8c88b826eba5 req-d39b43e5-bb66-460f-a2db-367776e31d5d service nova] Releasing lock "refresh_cache-020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.901091] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Successfully updated port: 63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.902492] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1162d2e-cdd9-4259-9ac6-c6481f0ac517 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.914529] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8143a14b-6454-4424-bfb9-e73976525916 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.939185] env[61905]: DEBUG nova.compute.provider_tree [None 
req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.970870] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362498, 'name': ReconfigVM_Task, 'duration_secs': 0.322197} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.975075] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Reconfigured VM instance instance-00000042 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.981881] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362499, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.982238] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9a4909cb-1a75-4422-b8ef-718151dbd6f7 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 92.517s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.982538] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8baa8262-8809-4dde-91a8-3b3d75d0aff4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.001557] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 863.001557] env[61905]: value = "task-1362500" [ 863.001557] env[61905]: _type = "Task" [ 863.001557] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.014368] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362500, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.169288] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877791} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.169752] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 4bb7a2df-b472-4f6d-8a01-a55d0b86efda/4bb7a2df-b472-4f6d-8a01-a55d0b86efda.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.169866] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.170173] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d030b81e-349d-45b4-8cfe-fdf418173102 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.178182] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 863.178182] env[61905]: value = "task-1362501" [ 863.178182] env[61905]: _type = "Task" [ 863.178182] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.189659] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362501, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.354192] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 863.386188] env[61905]: DEBUG nova.virt.hardware [None
req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 863.386523] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 863.386820] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 863.387574] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19599012-009e-4ee2-9818-8fecc7295e86 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.405196] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9922ac36-f2d1-4aef-86e0-ab62940fe3ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.409678] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.409811] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.409991] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.443398] env[61905]: DEBUG nova.scheduler.client.report [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 863.470234] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 
tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.793064} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.470618] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e/020f97b7-e3e4-44e1-9ad2-97e3ed671f7e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.470947] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.471255] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1ef5c34-97cd-4a41-895a-9ae03fe1add1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.478808] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 863.478808] env[61905]: value = "task-1362502" [ 863.478808] env[61905]: _type = "Task" [ 863.478808] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.488771] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362502, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.517587] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362500, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.688802] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362501, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088836} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.689274] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.690133] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a95f44-a16d-4255-b931-d04ba7c32089 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.716317] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 4bb7a2df-b472-4f6d-8a01-a55d0b86efda/4bb7a2df-b472-4f6d-8a01-a55d0b86efda.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.716693] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aff85022-40c8-49d1-b56d-ddd5271f7205 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.733442] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.733641] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.738431] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 863.738431] env[61905]: value = "task-1362503" [ 863.738431] env[61905]: _type = "Task" [ 863.738431] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.747213] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362503, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.798794] env[61905]: DEBUG nova.compute.manager [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Received event network-vif-plugged-63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.799306] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Acquiring lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.799584] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.799784] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.799968] env[61905]: DEBUG nova.compute.manager [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] No waiting events found dispatching network-vif-plugged-63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 863.800164] env[61905]: WARNING nova.compute.manager [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Received unexpected event network-vif-plugged-63b71759-5a81-4ed4-8ced-62e30a16037e for instance with vm_state building and task_state spawning. [ 863.800385] env[61905]: DEBUG nova.compute.manager [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Received event network-changed-63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.800999] env[61905]: DEBUG nova.compute.manager [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Refreshing instance network info cache due to event network-changed-63b71759-5a81-4ed4-8ced-62e30a16037e.
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 863.801244] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Acquiring lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.940520] env[61905]: INFO nova.compute.manager [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Rebuilding instance [ 863.954381] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.954643] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 863.958648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.241s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.960739] env[61905]: INFO nova.compute.claims [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.997744] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362502, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.309575} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.998962] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.999708] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0aa3a69-b3b1-4a12-9527-85434e132f21 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.033939] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e/020f97b7-e3e4-44e1-9ad2-97e3ed671f7e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.035228] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.041625] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f639ec6b-af59-46d0-963c-0a1aa2035d3c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.059935] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362500, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.067059] env[61905]: DEBUG nova.compute.manager [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 864.068048] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405d1e03-7d87-41f5-b2ee-506c760bdd03 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.075331] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 864.075331] env[61905]: value = "task-1362504" [ 864.075331] env[61905]: _type = "Task" [ 864.075331] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.090218] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362504, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.251908] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.251908] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 864.251908] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 864.261821] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362503, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.288860] env[61905]: DEBUG nova.compute.manager [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 864.289532] env[61905]: DEBUG nova.compute.manager [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing instance network info cache due to event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 864.289532] env[61905]: DEBUG oslo_concurrency.lockutils [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] Acquiring lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.289532] env[61905]: DEBUG oslo_concurrency.lockutils [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] Acquired lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.290138] env[61905]: DEBUG nova.network.neutron [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.465487] env[61905]: DEBUG nova.compute.utils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.477642] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 864.477642] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.506735] env[61905]: DEBUG nova.network.neutron [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Updating instance_info_cache with network_info: [{"id": "63b71759-5a81-4ed4-8ced-62e30a16037e", "address": "fa:16:3e:c7:97:58", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b71759-5a", "ovs_interfaceid": "63b71759-5a81-4ed4-8ced-62e30a16037e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.526159] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362500, 'name': ReconfigVM_Task, 'duration_secs': 1.230779} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.526159] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290024', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'name': 'volume-2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a9ac365e-2be1-438d-a514-6fa7b26fa10c', 'attached_at': '', 'detached_at': '', 'volume_id': '2f6f8122-2df2-4b93-a09d-7ce9b009fece', 'serial': '2f6f8122-2df2-4b93-a09d-7ce9b009fece'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 864.526463] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.527700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6945f70e-ef1d-4661-b47a-31de89f33299 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.539243] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 864.539672] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51951dde-99a0-4e34-98a0-8a6859e1d912 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.576253] env[61905]: DEBUG nova.policy [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cfd818932a44306bec0838cb58bf483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69666592007841459c3f8f9836ef4d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.586983] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 
tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.593566] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cecd4ef-46ba-4926-a54d-191bfc28c75b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.599417] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362504, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.605989] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 864.605989] env[61905]: value = "task-1362506" [ 864.605989] env[61905]: _type = "Task" [ 864.605989] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.612343] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 864.612840] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 864.612947] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleting the datastore file [datastore1] a9ac365e-2be1-438d-a514-6fa7b26fa10c {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 864.613815] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04618447-9667-4ffe-b6e4-23b1e9a2515a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.623737] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.626643] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 864.626643] env[61905]: value = "task-1362507" [ 864.626643] env[61905]: _type = "Task" [ 864.626643] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.635955] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362507, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.753830] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362503, 'name': ReconfigVM_Task, 'duration_secs': 0.775102} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.758575] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Skipping network cache update for instance because it is being deleted. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 864.758575] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.758575] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.758769] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.759140] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.759140] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Skipping network cache update for instance because it is Building. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.759259] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Skipping network cache update for instance because it is Building. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 864.762715] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 4bb7a2df-b472-4f6d-8a01-a55d0b86efda/4bb7a2df-b472-4f6d-8a01-a55d0b86efda.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.763716] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55110838-e3e2-4624-bf2f-744b3899fcde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.775413] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 864.775413] env[61905]: value = "task-1362508" [ 864.775413] env[61905]: _type = "Task" [ 864.775413] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.789688] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362508, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.790695] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.790828] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.791013] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 864.791363] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid 60e68738-a333-44b2-a1e8-0b3da728059e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.975546] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.014743] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.015256] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Instance network_info: |[{"id": "63b71759-5a81-4ed4-8ced-62e30a16037e", "address": "fa:16:3e:c7:97:58", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b71759-5a", "ovs_interfaceid": "63b71759-5a81-4ed4-8ced-62e30a16037e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 865.015786] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Acquired lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.016107] env[61905]: DEBUG nova.network.neutron [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Refreshing network info cache for port 63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.029746] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:97:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b71759-5a81-4ed4-8ced-62e30a16037e', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.039178] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating folder: Project (82f1c8d91a7b4119bb32c82ef7bd940f). 
Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.048764] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7efe19b1-b9a4-484e-a88b-34196cafd17c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.060598] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created folder: Project (82f1c8d91a7b4119bb32c82ef7bd940f) in parent group-v289968. [ 865.060598] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating folder: Instances. Parent ref: group-v290036. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 865.060598] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01d945a8-1d39-44cf-a007-8cf2b2ea8446 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.069206] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created folder: Instances in parent group-v290036. [ 865.069206] env[61905]: DEBUG oslo.service.loopingcall [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.069417] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.070076] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29821f0e-b904-4118-95aa-3f225f1214f3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.098690] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362504, 'name': ReconfigVM_Task, 'duration_secs': 0.55502} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.099802] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e/020f97b7-e3e4-44e1-9ad2-97e3ed671f7e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.103258] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.103258] env[61905]: value = "task-1362511" [ 865.103258] env[61905]: _type = "Task" [ 865.103258] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.103258] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-893a60ff-7a66-41d4-be1a-8a38561abc92 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.113515] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 865.113515] env[61905]: value = "task-1362512" [ 865.113515] env[61905]: _type = "Task" [ 865.113515] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.117666] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362511, 'name': CreateVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.126675] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.133866] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362512, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.139323] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362507, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.249020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.249334] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.249574] env[61905]: INFO nova.compute.manager [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Rebooting instance [ 865.286414] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362508, 'name': Rename_Task, 'duration_secs': 0.178873} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.289145] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.289593] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ef22a96-2639-4855-97a0-799f6faf2c16 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.297575] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 865.297575] env[61905]: value = "task-1362513" [ 865.297575] env[61905]: _type = "Task" [ 865.297575] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.311105] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362513, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.390595] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3ea8e8-7a9b-4eb9-93ed-62b27f896498 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.400076] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c350271-6952-47df-abaa-c037991b5bdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.407612] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Successfully created port: 49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.435805] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2878f47f-2876-4ee6-90ca-0851a617f9c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.444360] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319c6892-54ed-45b4-8cee-93520b7bdcbd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.459089] env[61905]: DEBUG nova.compute.provider_tree [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.616126] env[61905]: DEBUG nova.network.neutron [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updated VIF entry in instance network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.616126] env[61905]: DEBUG nova.network.neutron [req-4de9038b-1aa0-4595-ba90-83460ce172d9 req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.620601] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362511, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.627635] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362506, 'name': PowerOffVM_Task, 'duration_secs': 0.758029} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.628433] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 865.628730] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.633016] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac6d1d8-afa8-4ffc-a21f-7f4881013b2a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.640527] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362512, 'name': Rename_Task, 'duration_secs': 0.255652} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.643767] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.644400] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d4b5da0-fffd-418a-8bdc-4622fa781f1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.652256] env[61905]: DEBUG oslo_vmware.api [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.754536} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.652540] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 865.653199] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.653383] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 865.653559] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.653726] env[61905]: INFO nova.compute.manager [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Took 3.89 seconds to destroy the instance on the hypervisor. [ 865.653976] env[61905]: DEBUG oslo.service.loopingcall [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.654203] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6664750c-028c-4b1e-9685-8f222ee65d66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.655830] env[61905]: DEBUG nova.compute.manager [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 865.655935] env[61905]: DEBUG nova.network.neutron [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 865.662341] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 865.662341] env[61905]: value = "task-1362514" [ 865.662341] env[61905]: _type = "Task" [ 865.662341] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.672755] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.703044] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Successfully updated port: 76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.742046] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.742046] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.742293] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleting the datastore file [datastore1] a6e45dd1-e0ee-4bda-9513-4b1000e15e49 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.742996] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39a86edf-1746-467e-a809-4f7c286cae0a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.749666] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b 
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 865.749666] env[61905]: value = "task-1362516" [ 865.749666] env[61905]: _type = "Task" [ 865.749666] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.761553] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.780541] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.813372] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362513, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.848830] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.964932] env[61905]: DEBUG nova.scheduler.client.report [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 865.997820] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.030370] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.030693] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.030893] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.031138] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.031325] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 866.031493] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.031775] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.031987] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.032213] env[61905]: DEBUG nova.virt.hardware [None 
req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.032419] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.033410] env[61905]: DEBUG nova.virt.hardware [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.035370] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dce77b-2b29-4f37-a76c-ef8ab5a6cff3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.044467] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91af958-6245-4653-85f0-bdfc0a502dbf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.115540] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362511, 'name': CreateVM_Task, 'duration_secs': 0.577403} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.116042] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.116890] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.116890] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.117222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.117545] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3570058-9f4b-4c97-bb04-3b5caf2da52a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.122634] env[61905]: DEBUG oslo_concurrency.lockutils [req-4de9038b-1aa0-4595-ba90-83460ce172d9 
req-9cd1df60-5f03-4276-af47-b442a410b4c0 service nova] Releasing lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.123116] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 866.123116] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525a9b34-a8ea-5a6d-72e5-ab9ec1a1d73b" [ 866.123116] env[61905]: _type = "Task" [ 866.123116] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.123371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquired lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.123754] env[61905]: DEBUG nova.network.neutron [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.133012] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525a9b34-a8ea-5a6d-72e5-ab9ec1a1d73b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.160137] env[61905]: DEBUG nova.network.neutron [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Updated VIF entry in instance network info cache for port 63b71759-5a81-4ed4-8ced-62e30a16037e. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.160521] env[61905]: DEBUG nova.network.neutron [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Updating instance_info_cache with network_info: [{"id": "63b71759-5a81-4ed4-8ced-62e30a16037e", "address": "fa:16:3e:c7:97:58", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b71759-5a", "ovs_interfaceid": "63b71759-5a81-4ed4-8ced-62e30a16037e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.171465] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362514, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.204932] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.205213] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.205464] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.262131] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237428} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.262131] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 866.262131] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 866.262589] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.314162] env[61905]: DEBUG oslo_vmware.api [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362513, 'name': PowerOnVM_Task, 'duration_secs': 0.638521} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.314458] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.314654] env[61905]: INFO nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Took 10.34 seconds to spawn the instance on the hypervisor. 
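
The SearchDatastore_Task and PowerOnVM_Task entries above follow oslo.vmware's polling pattern: vCenter returns a task reference immediately, and wait_for_task re-reads the task state until it is terminal, logging intermediate progress ("progress is 0%.") and the final 'duration_secs' on success. A minimal sketch of such a polling loop, where read_task_info is a hypothetical stand-in for the PropertyCollector round trip the library performs on each poll:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(session, task_ref, interval=0.5, timeout=300.0):
        # Poll a vCenter task until it reaches a terminal state, the way
        # the _poll_task entries in this log do. session.read_task_info()
        # is a hypothetical stand-in for the PropertyCollector read that
        # oslo.vmware issues on every poll.
        start = time.monotonic()
        while True:
            info = session.read_task_info(task_ref)
            if info.state == 'success':
                # logged as "... 'duration_secs': N} completed successfully."
                return info.result
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued'/'running': logged as "Task: {...} progress is N%."
            print("Task %s progress is %s%%." % (task_ref, info.progress or 0))
            if time.monotonic() - start > timeout:
                raise TimeoutError(task_ref)
            time.sleep(interval)
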
[ 866.314871] env[61905]: DEBUG nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.315769] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c74ede-291e-4b52-9f3a-579abe2c4d18 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.320552] env[61905]: DEBUG nova.compute.manager [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Received event network-vif-plugged-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.320794] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Acquiring lock "38b80675-182a-422c-9222-aa78ed59c351-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.321020] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Lock "38b80675-182a-422c-9222-aa78ed59c351-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.321194] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Lock "38b80675-182a-422c-9222-aa78ed59c351-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.321357] env[61905]: DEBUG nova.compute.manager [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] No waiting events found dispatching network-vif-plugged-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.321525] env[61905]: WARNING nova.compute.manager [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Received unexpected event network-vif-plugged-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f for instance with vm_state building and task_state spawning.
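
The network-vif-plugged sequence above shows how an external Neutron event is dispatched: the handler takes the per-instance "-events" lock, tries to pop a registered waiter for the event, and, finding none ("No waiting events found"), logs the WARNING about an unexpected event while the VM is still building. A rough illustration of that pop-or-warn table using threading primitives; the class and function names below are illustrative, not Nova's actual code:

    import threading

    class InstanceEvents:
        # Illustrative pop-or-warn event table modelled on the
        # pop_instance_event / "No waiting events found" messages above.
        def __init__(self):
            self._lock = threading.Lock()   # plays the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event) -> Event

        def prepare(self, instance_uuid, event):
            # Called by the thread that will later block on waiter.wait().
            with self._lock:
                waiter = threading.Event()
                self._waiters[(instance_uuid, event)] = waiter
                return waiter

        def pop(self, instance_uuid, event):
            with self._lock:
                return self._waiters.pop((instance_uuid, event), None)

    events = InstanceEvents()

    def external_instance_event(instance_uuid, event):
        waiter = events.pop(instance_uuid, event)
        if waiter is None:
            # No one registered for this event: the log's WARNING case,
            # e.g. a vif-plugged event racing with the ongoing build.
            print("Received unexpected event %s for instance %s"
                  % (event, instance_uuid))
        else:
            waiter.set()    # wake the thread blocked on waiter.wait()

The race is benign by design: an event that arrives before anyone waits for it is simply logged and dropped, which is exactly what the WARNING line records.
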
[ 866.321687] env[61905]: DEBUG nova.compute.manager [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Received event network-changed-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.321849] env[61905]: DEBUG nova.compute.manager [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Refreshing instance network info cache due to event network-changed-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 866.322032] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Acquiring lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.447764] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.470945] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.471611] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 866.474378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.639s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.474541] env[61905]: DEBUG nova.objects.instance [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lazy-loading 'resources' on Instance uuid aeb72a57-d319-479d-a1c7-3cebc6f73f09 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.546109] env[61905]: DEBUG nova.compute.manager [req-6da302c2-6b19-49be-a8f8-d3f817046b76 req-a38a9c56-6279-49e5-a5a3-a43c65b75f80 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Received event network-vif-deleted-1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.546286] env[61905]: INFO nova.compute.manager [req-6da302c2-6b19-49be-a8f8-d3f817046b76 req-a38a9c56-6279-49e5-a5a3-a43c65b75f80 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Neutron deleted interface 1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb; detaching it from the instance and deleting it from the info cache [ 866.546459] env[61905]: DEBUG nova.network.neutron [req-6da302c2-6b19-49be-a8f8-d3f817046b76 req-a38a9c56-6279-49e5-a5a3-a43c65b75f80 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.638207] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525a9b34-a8ea-5a6d-72e5-ab9ec1a1d73b, 'name': SearchDatastore_Task, 'duration_secs': 0.014337} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.639538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.639538] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.639538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.639538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.639538] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.639885] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c922a44-609d-4b50-8012-ee031a611802 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.650859] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.651073] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.651827] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa7a25fc-f168-4233-8bc1-b44cfd82b66c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.658941] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 866.658941] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5288502b-6df7-b77f-46ed-03d18b8546bb" [ 866.658941] env[61905]: _type = "Task" [ 866.658941] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.666949] env[61905]: DEBUG oslo_concurrency.lockutils [req-3e2b02c0-2f11-4c85-b3f8-5a0cd1dffeb2 req-26f638da-d353-4a97-bf1e-620c883c8cc6 service nova] Releasing lock "refresh_cache-e3b11ed6-b703-43a6-a528-28520ed43233" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.674741] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5288502b-6df7-b77f-46ed-03d18b8546bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.677868] env[61905]: DEBUG oslo_vmware.api [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362514, 'name': PowerOnVM_Task, 'duration_secs': 0.55249} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.677868] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.678131] env[61905]: INFO nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Took 8.33 seconds to spawn the instance on the hypervisor. 
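
The Acquiring/acquired/released lines throughout this stretch come from oslo.concurrency's lockutils, which logs the caller's qualname, the time spent waiting for the semaphore (e.g. "waited 18.639s" on compute_resources), and the time the lock was held (e.g. "held 2.512s"). The same context manager can be used directly; a small sketch that approximates the timing the library reports at DEBUG:

    import time
    from oslo_concurrency import lockutils

    def update_usage():
        t0 = time.monotonic()
        # lockutils.lock() is the context manager behind the
        # Acquiring/acquired/released lines in this log; the semaphore is
        # process-local unless external=True is passed.
        with lockutils.lock('compute_resources'):
            waited = time.monotonic() - t0
            print('Lock "compute_resources" acquired :: waited %.3fs' % waited)
            t1 = time.monotonic()
            # ... resource-tracker work would happen here ...
            held = time.monotonic() - t1
        print('Lock "compute_resources" released :: held %.3fs' % held)
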
[ 866.678400] env[61905]: DEBUG nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.679510] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9834bdfd-fbad-495a-9124-b8adcc1e4332 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.746461] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.843026] env[61905]: INFO nova.compute.manager [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Took 30.84 seconds to build instance. [ 866.947290] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Updating instance_info_cache with network_info: [{"id": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "address": "fa:16:3e:ed:33:1f", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76047de4-e7", "ovs_interfaceid": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.950611] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-60e68738-a333-44b2-a1e8-0b3da728059e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.951711] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 866.951711] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e 
None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.951711] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.951711] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.951711] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.952459] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.952459] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.952459] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 866.952459] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.978022] env[61905]: DEBUG nova.compute.utils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 866.985993] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 866.985993] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.026996] env[61905]: DEBUG nova.network.neutron [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.050558] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3818de6-0b05-4abe-a5c3-61afcda7fe71 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.068140] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228df5a5-ec9a-4629-9721-bc13cf1f2656 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.086903] env[61905]: DEBUG nova.policy [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0fc3bde6f3748df8116a36b9f7260b0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a78ffb1a94ca4220a39c68529eb5693d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 867.106380] env[61905]: DEBUG nova.compute.manager [req-6da302c2-6b19-49be-a8f8-d3f817046b76 req-a38a9c56-6279-49e5-a5a3-a43c65b75f80 service nova] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Detach interface failed, port_id=1676e09a-a6c6-4c1c-8d0e-5d094dcfbfeb, reason: Instance a9ac365e-2be1-438d-a514-6fa7b26fa10c could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 867.124993] env[61905]: DEBUG nova.network.neutron [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.172389] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5288502b-6df7-b77f-46ed-03d18b8546bb, 'name': SearchDatastore_Task, 'duration_secs': 0.016175} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.176538] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7170e1b0-ff39-4c2d-b761-e0c787532117 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.183026] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 867.183026] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200a683-c566-6b1b-84f1-5e23444bd8e9" [ 867.183026] env[61905]: _type = "Task" [ 867.183026] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.198382] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200a683-c566-6b1b-84f1-5e23444bd8e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.200706] env[61905]: INFO nova.compute.manager [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Took 29.73 seconds to build instance. [ 867.305145] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 867.305693] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 867.305693] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.305856] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 867.306020] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.306217] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 867.306469] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 867.306966] env[61905]: DEBUG
nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 867.307233] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 867.307432] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 867.307779] env[61905]: DEBUG nova.virt.hardware [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.308543] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a5cd29-b1d1-4e97-97d3-e8cb9649452a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.321222] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba807fe-d6ee-4efb-95b6-088053ec021c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.336183] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:8a:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91bfd878-6423-4f5a-9645-1fb1d70de825', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.344333] env[61905]: DEBUG oslo.service.loopingcall [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.347977] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.348393] env[61905]: DEBUG oslo_concurrency.lockutils [None req-00804de9-83b8-4f95-a8c1-d58088f0d101 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 85.032s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.348910] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a04c959a-cfa8-4f63-a36c-0f805b585171 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.371124] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.371124] env[61905]: value = "task-1362517" [ 867.371124] env[61905]: _type = "Task" [ 867.371124] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.379079] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae2a0bb-c0c3-411c-9b9b-1e2e6758ad61 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.394542] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345962b2-7cb2-41f8-a679-442bbc6cf0fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.398506] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362517, 'name': CreateVM_Task} progress is 6%.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.440482] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41070c8b-de1b-4cca-b8a7-c770f6c93ee4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.448550] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfc3258-9942-407e-9089-5282ec01837d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.455032] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.455032] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Instance network_info: |[{"id": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "address": "fa:16:3e:ed:33:1f", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76047de4-e7", "ovs_interfaceid": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.455032] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Acquired lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.455032] env[61905]: DEBUG nova.network.neutron [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Refreshing network info cache for port 76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.455331] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:ed:33:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76047de4-e7fa-4434-9ef3-4dcefcdf9f3f', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.462714] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating folder: Project (69666592007841459c3f8f9836ef4d7a). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.464387] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.464661] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2937037-a513-4d7b-96f7-080c12f3b7c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.475936] env[61905]: DEBUG nova.compute.provider_tree [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.487494] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created folder: Project (69666592007841459c3f8f9836ef4d7a) in parent group-v289968. [ 867.487494] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating folder: Instances. Parent ref: group-v290040. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.487494] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc3e502c-16af-4dec-a218-754265403761 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.489727] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 867.501350] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created folder: Instances in parent group-v290040. [ 867.501595] env[61905]: DEBUG oslo.service.loopingcall [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.501794] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.502384] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de7565bd-3dbf-47cb-89c0-31ad9c8e39ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.523863] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.523863] env[61905]: value = "task-1362520" [ 867.523863] env[61905]: _type = "Task" [ 867.523863] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.533177] env[61905]: INFO nova.compute.manager [-] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Took 1.88 seconds to deallocate network for instance. [ 867.534069] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362520, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.631472] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Releasing lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.633966] env[61905]: DEBUG nova.compute.manager [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.635016] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea090ad5-547f-4361-8f33-a8cdeefa037b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.698613] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5200a683-c566-6b1b-84f1-5e23444bd8e9, 'name': SearchDatastore_Task, 'duration_secs': 0.023807} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.699063] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.699359] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e3b11ed6-b703-43a6-a528-28520ed43233/e3b11ed6-b703-43a6-a528-28520ed43233.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.699701] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b1c1bd2-12e1-436e-92be-f620a2179393 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.705977] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a44cbf7c-9e1c-4f4d-925c-1ca8905d1634 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 74.681s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.714310] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 867.714310] env[61905]: value = "task-1362521" [ 867.714310] env[61905]: _type = "Task" [ 867.714310] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.726062] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.788534] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Successfully created port: 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.886164] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362517, 'name': CreateVM_Task, 'duration_secs': 0.425503} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.886398] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.887348] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.887589] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.887989] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 867.890072] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d46612bb-8a66-4b7b-868e-5f2d19ac3ccf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.897463] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 867.897463] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b34774-f4f9-6416-2d60-f701b9d42fec" [ 867.897463] env[61905]: _type = "Task" [ 867.897463] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.907540] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b34774-f4f9-6416-2d60-f701b9d42fec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.908457] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Successfully updated port: 49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.980961] env[61905]: DEBUG nova.scheduler.client.report [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 868.040122] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362520, 'name': CreateVM_Task, 'duration_secs': 0.433492} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.043466] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.045079] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.127796] env[61905]: INFO nova.compute.manager [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Took 0.59 seconds to detach 1 volumes for instance. [ 868.227725] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362521, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.287969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.288254] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.410360] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b34774-f4f9-6416-2d60-f701b9d42fec, 'name': SearchDatastore_Task, 'duration_secs': 0.014483} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.410822] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.410955] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.411117] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 868.412262] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.412493] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.412716] env[61905]: DEBUG oslo_concurrency.lockutils [None
req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.412891] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.413102] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.416218] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.416531] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.416784] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1584858-976c-4033-8b93-b6ea1dd4b7ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.418890] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52622822-f2e9-4716-b077-fcf56c32c00f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.426859] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 868.426859] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f25ad6-2e69-ff0c-baad-6830aba95e5d" [ 868.426859] env[61905]: _type = "Task" [ 868.426859] env[61905]: } to complete. 
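The paired "Acquiring lock ... by ..." / "acquired ... waited N s" lines here and throughout come from oslo.concurrency's lockutils wrappers: instance builds serialize on the instance UUID, and image-cache work serializes on the cached VMDK's datastore path. A minimal sketch of the pattern, with lock names copied from the log (the function bodies are illustrative, not Nova's call sites):

```python
from oslo_concurrency import lockutils

# Build/teardown paths serialize on the instance UUID (see the
# _locked_do_build_and_run_instance lines above); "waited 0.000s"
# means the lock was uncontended.
with lockutils.lock('27c3ed56-d24e-47d1-9c39-43b3b88a59b9'):
    pass  # build_and_run_instance work would happen here

# Image-cache operations lock on the cached VMDK path so only one
# request fetches or copies a given image at a time.
@lockutils.synchronized('[datastore2] devstack-image-cache_base/'
                        '4d166298-c700-4bc6-8f8f-67684a277053')
def fetch_image_if_missing():
    pass
```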
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.432856] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.432856] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.436641] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25b32111-0ad2-4515-8c0b-a7479e4396c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.439138] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f25ad6-2e69-ff0c-baad-6830aba95e5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.444814] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 868.444814] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f360e7-715f-96a6-f418-8364c4129789" [ 868.444814] env[61905]: _type = "Task" [ 868.444814] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.450749] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f360e7-715f-96a6-f418-8364c4129789, 'name': SearchDatastore_Task} progress is 0%. 
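Every "Waiting for the task: (returnval){ value = ... }" block is followed by "progress is N%" polls until the task reaches a terminal state; that is oslo.vmware's wait_for_task loop. The shape of it, as a hedged sketch rather than the library's actual code:

```python
import time

def wait_for_task(get_task_info, interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state."""
    while True:
        info = get_task_info()           # one round-trip per poll
        if info['state'] == 'success':   # -> "... completed successfully."
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued'/'running' -> the "progress is N%." DEBUG lines above
        time.sleep(interval)
```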
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.487619] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.497482] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.680s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.504466] env[61905]: INFO nova.compute.claims [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.505560] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 868.535338] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 868.535338] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 868.535338] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 868.535338] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d 
tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 868.535512] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 868.535695] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 868.536148] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 868.536148] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 868.536361] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 868.536741] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 868.536993] env[61905]: DEBUG nova.virt.hardware [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.538253] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820ecf3d-5bd0-4f51-a4db-93ff727be7db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.544367] env[61905]: INFO nova.scheduler.client.report [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted allocations for instance aeb72a57-d319-479d-a1c7-3cebc6f73f09 [ 868.557423] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbce2c2-cc04-487f-8f0d-60b3346bf016 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.571464] 
env[61905]: DEBUG nova.network.neutron [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Updated VIF entry in instance network info cache for port 76047de4-e7fa-4434-9ef3-4dcefcdf9f3f. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 868.571833] env[61905]: DEBUG nova.network.neutron [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Updating instance_info_cache with network_info: [{"id": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "address": "fa:16:3e:ed:33:1f", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76047de4-e7", "ovs_interfaceid": "76047de4-e7fa-4434-9ef3-4dcefcdf9f3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.635333] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.658834] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc17f07-1223-479a-a5c3-c6c92ef2ff75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.667320] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Doing hard reboot of VM {{(pid=61905) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 868.667892] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-7acfd3ce-607f-4ca3-9656-287e398f9045 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.674714] env[61905]: DEBUG oslo_vmware.api [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 868.674714] env[61905]: value = "task-1362522" [ 868.674714] env[61905]: _type = "Task" [ 868.674714] env[61905]: } to complete. 
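A few records up, the nova.virt.hardware lines walk the vCPU topology search for the m1.nano flavor: with no flavor or image limits set, the limits default to 65536 each, and 1 vCPU admits exactly one topology, 1 socket x 1 core x 1 thread. A simplified sketch of that enumeration step (Nova's real code in nova/virt/hardware.py additionally weighs preferences and NUMA constraints):

```python
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) triple whose product is vcpus,
    # within the limits -- the "Build topologies for N vcpu(s)" step above.
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -> "Got 1 possible topologies"
```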
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.684770] env[61905]: DEBUG oslo_vmware.api [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362522, 'name': ResetVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.723747] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594055} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.724521] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e3b11ed6-b703-43a6-a528-28520ed43233/e3b11ed6-b703-43a6-a528-28520ed43233.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.724984] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.725405] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03f6d2ce-12e9-4a9d-acd9-d4127e23b72a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.733463] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 868.733463] env[61905]: value = "task-1362523" [ 868.733463] env[61905]: _type = "Task" [ 868.733463] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.743890] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362523, 'name': ExtendVirtualDisk_Task} progress is 0%. 
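The "Extending root virtual disk to 1048576" record pairs the m1.nano flavor (root_gb=1, seen earlier) with vSphere's ExtendVirtualDisk_Task, whose capacity argument is in KB, so the number works out as:

```python
# 1 GiB root disk expressed in the KB units ExtendVirtualDisk expects
# (root_gb=1 comes from the m1.nano flavor logged earlier).
root_gb = 1
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576
```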
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.767470] env[61905]: DEBUG nova.compute.manager [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Received event network-vif-plugged-49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.768362] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Acquiring lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.768362] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.768362] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.768600] env[61905]: DEBUG nova.compute.manager [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] No waiting events found dispatching network-vif-plugged-49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 868.768846] env[61905]: WARNING nova.compute.manager [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Received unexpected event network-vif-plugged-49055aca-f39c-4b95-b186-b9007d03fde8 for instance with vm_state building and task_state spawning. [ 868.768922] env[61905]: DEBUG nova.compute.manager [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Received event network-changed-49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.770704] env[61905]: DEBUG nova.compute.manager [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Refreshing instance network info cache due to event network-changed-49055aca-f39c-4b95-b186-b9007d03fde8. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.770704] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Acquiring lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.791742] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.911169] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 868.912495] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba56f9df-257c-44aa-a62b-b2cda5da692d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.930133] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk is in state: ready. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 868.930133] env[61905]: ERROR oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk due to incomplete transfer. [ 868.931556] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-20b8a10e-e72d-4cfb-8900-58979623c5eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.940935] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f25ad6-2e69-ff0c-baad-6830aba95e5d, 'name': SearchDatastore_Task, 'duration_secs': 0.016734} completed successfully. 
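The WARNING just above ("Received unexpected event network-vif-plugged-...") is a benign race: Neutron's vif-plugged notification landed before the compute manager registered a waiter for it, so there was nothing to pop and the event was dropped. A simplified sketch of the handshake (Nova's real machinery lives in nova/compute/manager.py; the waiter dict below is ours):

```python
import threading

waiters = {}  # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    # Registered *before* plugging the VIF, so the event has a home.
    waiters[(instance_uuid, event_name)] = threading.Event()

def pop_instance_event(instance_uuid, event_name):
    ev = waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # -> "No waiting events found ... Received unexpected event" above
        print('WARNING: received unexpected event', event_name)
    else:
        ev.set()  # wakes the thread blocked waiting for the event
```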
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.942417] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.942809] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.942879] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.943483] env[61905]: DEBUG oslo_vmware.rw_handles [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527128a4-7868-d0c0-99fe-c9bddc3f2060/disk-0.vmdk. {{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 868.943483] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Uploaded image aef12d3a-bf73-4159-9d46-dd0a3ceba133 to the Glance image server {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 868.946437] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Destroying the VM {{(pid=61905) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 868.946437] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-169a7cd5-75cc-43c6-85f1-94cdd1255ace {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.965543] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f360e7-715f-96a6-f418-8364c4129789, 'name': SearchDatastore_Task, 'duration_secs': 0.014931} completed successfully. 
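The lease records above show the tail end of the snapshot upload: when the VMDK read handle is closed, the HttpNfcLease is released, and if fewer bytes were transferred than the expected size the lease is aborted rather than completed; with stream-optimized reads that can happen even though the Glance upload itself succeeded, as it did here. A hedged sketch of that decision, with a stub lease object (the byte count is made up; the expected size 21318656 is the image size from the metadata logged earlier):

```python
class Lease:                          # minimal stand-in for the vSphere object
    state = 'ready'
    def abort(self):    print('Invoking HttpNfcLease.HttpNfcLeaseAbort')
    def complete(self): print('Invoking HttpNfcLease.HttpNfcLeaseComplete')

def release_lease(lease, bytes_read, expected_size):
    if lease.state != 'ready':
        return                        # nothing to release
    if bytes_read < expected_size:
        lease.abort()                 # -> the ERROR + HttpNfcLeaseAbort lines
    else:
        lease.complete()

release_lease(Lease(), bytes_read=10_000_000, expected_size=21_318_656)
```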
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.965904] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 868.965904] env[61905]: value = "task-1362524" [ 868.965904] env[61905]: _type = "Task" [ 868.965904] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.967338] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d88a412-d354-48e0-a6e7-da0af1d12721 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.973917] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.980329] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 868.980329] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529a56ba-483c-ca62-0064-1416d589508f" [ 868.980329] env[61905]: _type = "Task" [ 868.980329] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.984717] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362524, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.995568] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529a56ba-483c-ca62-0064-1416d589508f, 'name': SearchDatastore_Task, 'duration_secs': 0.012806} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.995848] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.996602] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.996602] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.996790] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.996902] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f34449a3-eb52-45c6-8a0f-f320b02b495b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.002154] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49cf2fad-611a-4990-9fc7-d32547da66ee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.011727] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 869.011727] env[61905]: value = "task-1362525" [ 869.011727] env[61905]: _type = "Task" [ 869.011727] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.013318] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.013506] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.018596] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ef070a9-d380-4507-8e68-490c3b6bdb5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.028028] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 869.028028] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a6946-e27c-d5ad-222a-1ec9855e4ab8" [ 869.028028] env[61905]: _type = "Task" [ 869.028028] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.028319] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362525, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.038778] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a6946-e27c-d5ad-222a-1ec9855e4ab8, 'name': SearchDatastore_Task, 'duration_secs': 0.010343} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.039378] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4da70def-f592-4a82-ac1c-35ef7ebe819f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.044611] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 869.044611] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d5b6f0-378d-891b-66b1-4e97869b8707" [ 869.044611] env[61905]: _type = "Task" [ 869.044611] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.057348] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d5b6f0-378d-891b-66b1-4e97869b8707, 'name': SearchDatastore_Task} progress is 0%. 
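The MakeDirectory / "Folder [datastore2] devstack-image-cache_base created." pairs repeat for every request because directory creation is deliberately idempotent: the call is issued unconditionally and an already-exists fault counts as success. A local-filesystem analogue of the pattern (the datastore version goes through FileManager.MakeDirectory instead of os.makedirs):

```python
import os

def create_folder_if_missing(path):
    try:
        os.makedirs(path)
    except FileExistsError:
        pass  # another request won the race; that is fine
    print('Folder %s created.' % path)
```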
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.057893] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7781d0de-ac5d-48b5-8b7e-79ee293ca23a tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "aeb72a57-d319-479d-a1c7-3cebc6f73f09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.676s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.074607] env[61905]: DEBUG oslo_concurrency.lockutils [req-4457df05-1aff-41a8-a6cb-5e03580e6934 req-3abe02d1-9ab4-4ec3-a899-4f1ffd7adc01 service nova] Releasing lock "refresh_cache-38b80675-182a-422c-9222-aa78ed59c351" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.186178] env[61905]: DEBUG oslo_vmware.api [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362522, 'name': ResetVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.243824] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077206} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.244400] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.245263] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757568df-85e8-4f2c-93ba-97fe29fcfed2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.272525] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] e3b11ed6-b703-43a6-a528-28520ed43233/e3b11ed6-b703-43a6-a528-28520ed43233.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.273973] env[61905]: DEBUG nova.network.neutron [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Updating instance_info_cache with network_info: [{"id": "49055aca-f39c-4b95-b186-b9007d03fde8", "address": "fa:16:3e:d2:9d:1d", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49055aca-f3", "ovs_interfaceid": "49055aca-f39c-4b95-b186-b9007d03fde8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.276671] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d220fb01-262d-4c29-a5ff-897e25b459da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.294399] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.294973] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Instance network_info: |[{"id": "49055aca-f39c-4b95-b186-b9007d03fde8", "address": "fa:16:3e:d2:9d:1d", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49055aca-f3", "ovs_interfaceid": "49055aca-f39c-4b95-b186-b9007d03fde8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 869.298148] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Acquired lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.298483] env[61905]: DEBUG nova.network.neutron [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Refreshing network info 
cache for port 49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.300437] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:9d:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49055aca-f39c-4b95-b186-b9007d03fde8', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.309582] env[61905]: DEBUG oslo.service.loopingcall [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.316557] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.316929] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.318343] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.318343] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.318343] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.321867] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.322423] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2acddc31-6c6c-4380-801d-514057a59c9a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.339791] env[61905]: INFO nova.compute.manager [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Terminating instance [ 869.345907] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 869.345907] env[61905]: value = "task-1362526" [ 869.345907] env[61905]: _type = "Task" [ 869.345907] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.346755] env[61905]: DEBUG nova.compute.manager [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 869.346950] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.348754] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb95cb5-a158-4f01-a76f-425dc1196b79 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.356900] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.356900] env[61905]: value = "task-1362527" [ 869.356900] env[61905]: _type = "Task" [ 869.356900] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.364879] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.367416] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362526, 'name': ReconfigVM_Task} progress is 14%. 
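The "Instance VIF info" record above shows how a Neutron network_info entry is boiled down to the handful of fields the vSphere spec builder needs; for an NSX-backed port the network_ref points at the logical switch via its external id. A sketch of that mapping (values copied from the log; the function itself is ours):

```python
def vif_info_from_network_info(vif):
    # Reduce one network_info entry to the fields used to build the VM spec.
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],   # 'br-int'
        'mac_address': vif['address'],
        'network_ref': {'type': 'OpaqueNetwork',
                        'network-id': details['nsx-logical-switch-id'],
                        'network-type': 'nsx.LogicalSwitch',
                        'use-external-id': True},
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }

sample = {'id': '49055aca-f39c-4b95-b186-b9007d03fde8',
          'address': 'fa:16:3e:d2:9d:1d',
          'network': {'bridge': 'br-int'},
          'details': {'nsx-logical-switch-id':
                      '1eed7865-f9d8-463e-843f-3b0b3a962a2c'}}
print(vif_info_from_network_info(sample))
```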
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.367708] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 869.368316] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68683441-c871-4c01-b0f4-747b08ba83d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.373719] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362527, 'name': CreateVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.374773] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 869.374773] env[61905]: value = "task-1362528" [ 869.374773] env[61905]: _type = "Task" [ 869.374773] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.384103] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.477257] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362524, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.529923] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362525, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.563730] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d5b6f0-378d-891b-66b1-4e97869b8707, 'name': SearchDatastore_Task, 'duration_secs': 0.041288} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.567967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.568375] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 38b80675-182a-422c-9222-aa78ed59c351/38b80675-182a-422c-9222-aa78ed59c351.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.569073] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1113ca3-5cf7-42a0-b559-759c5e437fa8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.576969] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 869.576969] env[61905]: value = "task-1362529" [ 869.576969] env[61905]: _type = "Task" [ 869.576969] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.587374] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362529, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.626378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.626887] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.688935] env[61905]: DEBUG oslo_vmware.api [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362522, 'name': ResetVM_Task} progress is 100%. 
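The CopyVirtualDisk_Task above follows the driver's fixed layout: the cached image sits at devstack-image-cache_base/&lt;image-id&gt;/&lt;image-id&gt;.vmdk and each instance gets &lt;instance-uuid&gt;/&lt;instance-uuid&gt;.vmdk on the same datastore. A tiny helper (ours, for illustration) reproducing the two paths in the record:

```python
def ds_path(datastore, folder, name):
    return '[%s] %s/%s.vmdk' % (datastore, folder, name)

image = '4d166298-c700-4bc6-8f8f-67684a277053'
instance = '38b80675-182a-422c-9222-aa78ed59c351'
src = ds_path('datastore2', 'devstack-image-cache_base/' + image, image)
dst = ds_path('datastore2', instance, instance)
print(src, '->', dst)
```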
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.865039] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362526, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.874747] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362527, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.882331] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fba967-fed9-4ce7-96e4-b1412f8ecc62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.889680] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362528, 'name': PowerOffVM_Task, 'duration_secs': 0.204576} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.890614] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.891441] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.893678] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97fb8469-6396-444e-9c27-1e0601941bdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.900210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9750d206-27d9-4a3c-8d6f-8a94d075cd24 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.935666] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fbcb31-c85a-4b99-a6a0-2c28f2c9658b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.945075] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9d2052-b5ec-4522-a7cf-560c5f91a5b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.965635] env[61905]: DEBUG nova.compute.provider_tree [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.989061] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362524, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.003295] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 870.003425] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 870.003607] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Deleting the datastore file [datastore2] 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 870.004280] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c335c96e-1c35-4022-967b-5dd03fb84290 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.016086] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for the task: (returnval){ [ 870.016086] env[61905]: value = "task-1362531" [ 870.016086] env[61905]: _type = "Task" [ 870.016086] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.025429] env[61905]: DEBUG nova.network.neutron [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Updated VIF entry in instance network info cache for port 49055aca-f39c-4b95-b186-b9007d03fde8. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.025872] env[61905]: DEBUG nova.network.neutron [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Updating instance_info_cache with network_info: [{"id": "49055aca-f39c-4b95-b186-b9007d03fde8", "address": "fa:16:3e:d2:9d:1d", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49055aca-f3", "ovs_interfaceid": "49055aca-f39c-4b95-b186-b9007d03fde8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.035038] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362531, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.038989] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362525, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812973} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.040531] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.040868] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.041385] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2747edd2-63e9-4a02-9f3b-caf8016a58c2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.049064] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 870.049064] env[61905]: value = "task-1362532" [ 870.049064] env[61905]: _type = "Task" [ 870.049064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.060472] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362532, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.089300] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362529, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.130268] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 870.189899] env[61905]: DEBUG oslo_vmware.api [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362522, 'name': ResetVM_Task, 'duration_secs': 1.109952} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.190320] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Did hard reboot of VM {{(pid=61905) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 870.190320] env[61905]: DEBUG nova.compute.manager [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.191205] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf915eb-68cb-4fdb-a7fd-298f520fee62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.251389] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Successfully updated port: 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 870.360763] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362526, 'name': ReconfigVM_Task, 'duration_secs': 0.744619} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.363833] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Reconfigured VM instance instance-00000048 to attach disk [datastore2] e3b11ed6-b703-43a6-a528-28520ed43233/e3b11ed6-b703-43a6-a528-28520ed43233.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.364516] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4db3fac-4d28-4aa3-a05c-1e5bd36f7bf8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.370855] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362527, 'name': CreateVM_Task, 'duration_secs': 0.662188} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.371903] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.372227] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 870.372227] env[61905]: value = "task-1362533" [ 870.372227] env[61905]: _type = "Task" [ 870.372227] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.372829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.372992] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.373319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.373594] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da42fd99-7be8-40bd-82ea-e43cba16a257 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.380198] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 870.380198] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a07ef2-3cb0-6212-0fbb-7bdd72a54784" [ 870.380198] env[61905]: _type = "Task" [ 870.380198] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.383261] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362533, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.395839] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a07ef2-3cb0-6212-0fbb-7bdd72a54784, 'name': SearchDatastore_Task, 'duration_secs': 0.008539} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.396469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.396516] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.396767] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.396931] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.397132] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.397404] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f239903-1750-4582-88e5-1f254576c222 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.411032] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.411032] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.411631] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0190025-ea22-4a70-8ad0-da6b1f0fb0bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.417155] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 870.417155] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4ea7e-0c5a-c03a-b6c9-b301a68b893f" [ 870.417155] env[61905]: _type = "Task" [ 870.417155] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.426605] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4ea7e-0c5a-c03a-b6c9-b301a68b893f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.473389] env[61905]: DEBUG nova.scheduler.client.report [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.485415] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362524, 'name': Destroy_Task, 'duration_secs': 1.4799} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.485593] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Destroyed the VM [ 870.485908] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Deleting Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 870.485908] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-530e5dcf-8c1a-408d-924e-d05feeb77829 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.493371] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 870.493371] env[61905]: value = "task-1362534" [ 870.493371] env[61905]: _type = "Task" [ 870.493371] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.502605] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362534, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.525316] env[61905]: DEBUG oslo_vmware.api [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Task: {'id': task-1362531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331022} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.525568] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.525759] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.525928] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.526165] env[61905]: INFO nova.compute.manager [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 870.526409] env[61905]: DEBUG oslo.service.loopingcall [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.526594] env[61905]: DEBUG nova.compute.manager [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 870.526768] env[61905]: DEBUG nova.network.neutron [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.530514] env[61905]: DEBUG oslo_concurrency.lockutils [req-9c5a3c66-62d3-4373-8809-241ec1fdd742 req-832afd98-0c32-4c3c-a4bd-2d2548b12d80 service nova] Releasing lock "refresh_cache-2c919b69-0e09-431d-8a75-98d5740c7dab" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.558524] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362532, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.212097} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.558858] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.559681] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e60ba96-11e3-4d06-bf48-93ca35552413 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.586262] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.588800] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-008e52b7-1941-4fa9-a56d-cfa9838ee005 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.614063] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362529, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657225} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.615712] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 38b80675-182a-422c-9222-aa78ed59c351/38b80675-182a-422c-9222-aa78ed59c351.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.615927] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.616330] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 870.616330] env[61905]: value = "task-1362535" [ 870.616330] env[61905]: _type = "Task" [ 870.616330] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.616579] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6bf764f-e34d-4d0f-a444-8903de4e8716 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.628463] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362535, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.630078] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 870.630078] env[61905]: value = "task-1362536" [ 870.630078] env[61905]: _type = "Task" [ 870.630078] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.639718] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.663873] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.704789] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ecc160d-9ee2-48ae-879c-9c4866d73c52 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.455s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.757974] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.758157] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.758318] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Building network info cache for instance {{(pid=61905) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 870.885215] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362533, 'name': Rename_Task, 'duration_secs': 0.269791} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.885497] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.885747] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eae4ad13-9361-4240-99d7-47c305833d51 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.892250] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 870.892250] env[61905]: value = "task-1362537" [ 870.892250] env[61905]: _type = "Task" [ 870.892250] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.905809] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362537, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.932586] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4ea7e-0c5a-c03a-b6c9-b301a68b893f, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.934059] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86f28b12-d74e-4756-9073-1488d8dfa3d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.940616] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 870.940616] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5229a74b-3440-e67c-388e-a348d42d3579" [ 870.940616] env[61905]: _type = "Task" [ 870.940616] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.952533] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5229a74b-3440-e67c-388e-a348d42d3579, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.960875] env[61905]: DEBUG nova.compute.manager [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.961275] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.961514] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.961691] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.961882] env[61905]: DEBUG nova.compute.manager [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] No waiting events found dispatching network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 870.962355] env[61905]: WARNING nova.compute.manager [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received unexpected event network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 for instance with vm_state building and task_state spawning. [ 870.962430] env[61905]: DEBUG nova.compute.manager [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.962798] env[61905]: DEBUG nova.compute.manager [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing instance network info cache due to event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 870.963715] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.981993] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.982563] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 870.985750] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.581s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.987331] env[61905]: INFO nova.compute.claims [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.004993] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362534, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.128226] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362535, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.138454] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074709} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.138783] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.139739] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32af4ec1-10cb-42e0-b98e-44f14d7a9089 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.173075] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 38b80675-182a-422c-9222-aa78ed59c351/38b80675-182a-422c-9222-aa78ed59c351.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.173798] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-525af3b6-2758-46be-b139-3b4550877b94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.196386] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 871.196386] env[61905]: value = "task-1362538" [ 871.196386] env[61905]: _type = "Task" [ 871.196386] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.209546] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362538, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.308628] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 871.374907] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.375112] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.375320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.375499] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.375666] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.378310] env[61905]: INFO nova.compute.manager [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Terminating instance [ 871.379650] env[61905]: DEBUG nova.network.neutron [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.381654] env[61905]: DEBUG nova.compute.manager [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 871.381654] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 871.382779] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd70d934-b598-466b-9866-a3da0c24c9ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.392103] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 871.392414] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec13201f-1bdd-4985-8db5-222b6034785a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.406118] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362537, 'name': PowerOnVM_Task} progress is 87%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.457107] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5229a74b-3440-e67c-388e-a348d42d3579, 'name': SearchDatastore_Task, 'duration_secs': 0.013105} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.457471] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.457743] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 2c919b69-0e09-431d-8a75-98d5740c7dab/2c919b69-0e09-431d-8a75-98d5740c7dab.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.459048] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06efac0d-d1c8-4bdc-986e-dca3d78040a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.471673] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 871.471673] env[61905]: value = "task-1362540" [ 871.471673] env[61905]: _type = "Task" [ 871.471673] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.486287] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362540, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.489471] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.489829] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.495532] env[61905]: DEBUG nova.compute.utils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 871.503048] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 871.503261] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.519965] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362534, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.604131] env[61905]: DEBUG nova.network.neutron [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.609020] env[61905]: DEBUG nova.policy [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c333d75b5a4595bc6ccefb241746ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8bba93d3fa8a4462825a606bd11032ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 871.633655] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362535, 'name': ReconfigVM_Task, 'duration_secs': 0.807494} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.634331] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Reconfigured VM instance instance-00000044 to attach disk [datastore2] a6e45dd1-e0ee-4bda-9513-4b1000e15e49/a6e45dd1-e0ee-4bda-9513-4b1000e15e49.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.634989] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a829eab-5fed-49fb-b5a9-ef620ee4eeae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.647709] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 871.647709] env[61905]: value = "task-1362541" [ 871.647709] env[61905]: _type = "Task" [ 871.647709] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.668118] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362541, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.708650] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362538, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.888459] env[61905]: INFO nova.compute.manager [-] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Took 1.36 seconds to deallocate network for instance. [ 871.892999] env[61905]: DEBUG nova.compute.manager [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.894021] env[61905]: DEBUG nova.compute.manager [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing instance network info cache due to event network-changed-e1eae77b-b6ce-4fab-9407-143d4f0555ff. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.894021] env[61905]: DEBUG oslo_concurrency.lockutils [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] Acquiring lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.894021] env[61905]: DEBUG oslo_concurrency.lockutils [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] Acquired lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.894021] env[61905]: DEBUG nova.network.neutron [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Refreshing network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.909700] env[61905]: DEBUG oslo_vmware.api [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362537, 'name': PowerOnVM_Task, 'duration_secs': 0.792188} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.910693] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.910938] env[61905]: INFO nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Took 11.09 seconds to spawn the instance on the hypervisor. [ 871.911209] env[61905]: DEBUG nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 871.912113] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20fe0a5-5a1b-4551-9f44-86c0fd27abd4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.990196] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362540, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.003501] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 872.006304] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 872.023675] env[61905]: DEBUG oslo_vmware.api [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362534, 'name': RemoveSnapshot_Task, 'duration_secs': 1.061731} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.025018] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Deleted Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 872.025018] env[61905]: INFO nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Took 16.76 seconds to snapshot the instance on the hypervisor. [ 872.069725] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Successfully created port: 0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.105962] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.106352] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance network_info: |[{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": 
"767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 872.106704] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.106932] env[61905]: DEBUG nova.network.neutron [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.108095] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:2c:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.116743] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating folder: Project (a78ffb1a94ca4220a39c68529eb5693d). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.124604] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f9e5859-6f5b-42eb-9c37-2d95acd31e91 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.129743] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.130040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.143586] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Created folder: Project (a78ffb1a94ca4220a39c68529eb5693d) in parent group-v289968. 
[ 872.143586] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating folder: Instances. Parent ref: group-v290044. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.143759] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-205dc093-1115-411f-9554-628a330ee124 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.160277] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362541, 'name': Rename_Task, 'duration_secs': 0.3501} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.165008] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.165331] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Created folder: Instances in parent group-v290044. [ 872.165544] env[61905]: DEBUG oslo.service.loopingcall [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.166290] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b7ad037-333c-4771-a9e4-ef92c146fcd4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.168212] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.168938] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79a07912-64fa-4148-97da-82a4a4e99fa7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.195072] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 872.195072] env[61905]: value = "task-1362544" [ 872.195072] env[61905]: _type = "Task" [ 872.195072] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.196398] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.196398] env[61905]: value = "task-1362545" [ 872.196398] env[61905]: _type = "Task" [ 872.196398] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.213993] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.217288] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362545, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.225557] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362538, 'name': ReconfigVM_Task, 'duration_secs': 0.573828} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.228502] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 38b80675-182a-422c-9222-aa78ed59c351/38b80675-182a-422c-9222-aa78ed59c351.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.229214] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-993a2928-536b-4b3e-b9d3-8b0be8662d8c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.243088] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 872.243088] env[61905]: value = "task-1362546" [ 872.243088] env[61905]: _type = "Task" [ 872.243088] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.252596] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362546, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.400847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.414635] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42149287-19b5-4d56-9321-58cd55b5c7c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.425076] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42196428-3a6e-4d7f-9cd8-7fab757b8c79 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.440273] env[61905]: INFO nova.compute.manager [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Took 32.84 seconds to build instance. [ 872.499637] env[61905]: DEBUG oslo_concurrency.lockutils [None req-128d0ebf-0e10-417d-898c-8f147c78b94a tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.084s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.507070] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7405755-ebc6-4cbf-a6ae-ca3d9d18eb48 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.530909] env[61905]: DEBUG nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance disappeared during snapshot {{(pid=61905) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 872.533820] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518595} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.540070] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 2c919b69-0e09-431d-8a75-98d5740c7dab/2c919b69-0e09-431d-8a75-98d5740c7dab.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.540479] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.541298] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e678ecf-c4f3-42b9-8678-2b1314476aed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.547028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873177f7-fe1c-42fb-9365-9ba37b0693b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.558448] env[61905]: DEBUG nova.compute.manager [None req-c5dccdbe-ae8a-4758-b527-77aefc538ce9 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image not found during clean up aef12d3a-bf73-4159-9d46-dd0a3ceba133 {{(pid=61905) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4500}} [ 872.562847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.584508] env[61905]: DEBUG nova.compute.provider_tree [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.590257] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 872.590257] env[61905]: value = "task-1362547" [ 872.590257] env[61905]: _type = "Task" [ 872.590257] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.605163] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362547, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.632080] env[61905]: DEBUG nova.network.neutron [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updated VIF entry in instance network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.633285] env[61905]: DEBUG nova.network.neutron [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.671342] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 872.671728] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 872.672052] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleting the datastore file [datastore1] e1a22f3e-4557-44d2-8e34-cc75f573fe41 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 872.672422] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ac3bd66-a363-49d7-b4ce-3088c0f05793 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.686214] env[61905]: DEBUG oslo_vmware.api [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 
872.686214] env[61905]: value = "task-1362548" [ 872.686214] env[61905]: _type = "Task" [ 872.686214] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.706558] env[61905]: DEBUG oslo_vmware.api [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.715551] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362544, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.718807] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362545, 'name': CreateVM_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.755213] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362546, 'name': Rename_Task, 'duration_secs': 0.467626} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.755600] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.755862] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-537b49ec-b4c2-427c-b335-e9ee018f8225 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.763747] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 872.763747] env[61905]: value = "task-1362549" [ 872.763747] env[61905]: _type = "Task" [ 872.763747] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.773600] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.791697] env[61905]: DEBUG nova.network.neutron [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updated VIF entry in instance network info cache for port e1eae77b-b6ce-4fab-9407-143d4f0555ff. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.792153] env[61905]: DEBUG nova.network.neutron [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [{"id": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "address": "fa:16:3e:f3:57:fa", "network": {"id": "543f478c-b8f2-4bd5-9354-e98f24dc9594", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-715852827-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9c343ce0dc043d4b39b04dc6bdc70aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1eae77b-b6", "ovs_interfaceid": "e1eae77b-b6ce-4fab-9407-143d4f0555ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.890139] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.890756] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.891165] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.891508] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.891821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 
tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.900459] env[61905]: INFO nova.compute.manager [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Terminating instance [ 872.903163] env[61905]: DEBUG nova.compute.manager [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 872.903799] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 872.904413] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d888a61-98c1-48d9-a7a5-7fc374221bf9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.917861] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 872.918174] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b96b40b-caf7-47e3-ba0e-1075b94abd7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.925491] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 872.925491] env[61905]: value = "task-1362550" [ 872.925491] env[61905]: _type = "Task" [ 872.925491] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.935622] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362550, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.018885] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 873.023219] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 873.049091] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.049378] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.049550] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.049798] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.049971] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.050140] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.050370] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.050531] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 873.050714] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.050885] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.051453] env[61905]: DEBUG nova.virt.hardware [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.052095] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645ea32a-80a3-4938-9e90-ad4184796ee4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.061681] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad34776a-fa9e-44a8-8c97-b75e451b781e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.095023] env[61905]: DEBUG nova.scheduler.client.report [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 873.107248] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195223} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.107619] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.109478] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22c5d60-c148-48b1-8554-9bfe482ea11f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.115452] env[61905]: DEBUG nova.compute.manager [req-1278163e-31a5-46aa-89b9-737661d30f66 req-43200182-f838-42dd-9bc4-b7ba67a2e387 service nova] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Received event network-vif-deleted-6fcd65df-60b2-46d9-bc62-81c3b64ca522 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.140378] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 2c919b69-0e09-431d-8a75-98d5740c7dab/2c919b69-0e09-431d-8a75-98d5740c7dab.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.141904] env[61905]: DEBUG oslo_concurrency.lockutils [req-27ea245e-714d-4572-bce7-d46fbc8c21ee req-454f1d49-1559-4c0d-9cb7-e16c31ebe8db service nova] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.142381] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e65b1b9-95eb-452a-830f-91d061e54635 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.164717] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 873.164717] env[61905]: value = "task-1362551" [ 873.164717] env[61905]: _type = "Task" [ 873.164717] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.175763] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.195895] env[61905]: DEBUG oslo_vmware.api [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217356} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.196206] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 873.196586] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 873.196677] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 873.196896] env[61905]: INFO nova.compute.manager [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Took 1.82 seconds to destroy the instance on the hypervisor. [ 873.197189] env[61905]: DEBUG oslo.service.loopingcall [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.197593] env[61905]: DEBUG nova.compute.manager [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 873.197593] env[61905]: DEBUG nova.network.neutron [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 873.208984] env[61905]: DEBUG oslo_vmware.api [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362544, 'name': PowerOnVM_Task, 'duration_secs': 0.973744} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.212256] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.212529] env[61905]: DEBUG nova.compute.manager [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.213461] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3945605a-49dd-4d0d-a0bb-dcba18fae320 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.227590] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362545, 'name': CreateVM_Task, 'duration_secs': 0.732427} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.227590] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.227757] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.227919] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.228288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 873.228568] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-881e4496-80bf-47a1-bf08-b26bb838afe7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.235146] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 873.235146] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524b6470-49ba-f4e7-4a02-9ce001c4199f" [ 873.235146] env[61905]: _type = "Task" [ 873.235146] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.246240] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524b6470-49ba-f4e7-4a02-9ce001c4199f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.278927] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362549, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.295289] env[61905]: DEBUG oslo_concurrency.lockutils [req-6b8caa94-9424-4850-8d51-62b395c38ab2 req-5515e384-f267-48ee-99af-58deb5362d3d service nova] Releasing lock "refresh_cache-9a385d72-ba5d-48e0-b71f-d37d4e63c403" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.438926] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362550, 'name': PowerOffVM_Task, 'duration_secs': 0.22045} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.438926] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.438926] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 873.438926] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d985a74-5fc5-43f4-830e-247ae1ea1a3b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.523570] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 873.524263] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 873.524263] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 
tempest-SecurityGroupsTestJSON-908414919-project-member] Deleting the datastore file [datastore1] 9a385d72-ba5d-48e0-b71f-d37d4e63c403 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 873.524902] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e268896d-11ad-4a87-9fe7-1350e9905ac2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.548296] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for the task: (returnval){ [ 873.548296] env[61905]: value = "task-1362553" [ 873.548296] env[61905]: _type = "Task" [ 873.548296] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.560382] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.561635] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.601295] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.601993] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 873.605575] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.158s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.605922] env[61905]: DEBUG nova.objects.instance [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lazy-loading 'resources' on Instance uuid 60e68738-a333-44b2-a1e8-0b3da728059e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.677673] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362551, 'name': ReconfigVM_Task, 'duration_secs': 0.46647} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.678178] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 2c919b69-0e09-431d-8a75-98d5740c7dab/2c919b69-0e09-431d-8a75-98d5740c7dab.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.679057] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34423f5c-ccd8-4b16-9303-e66416192d6c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.690057] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 873.690057] env[61905]: value = "task-1362554" [ 873.690057] env[61905]: _type = "Task" [ 873.690057] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.697726] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362554, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.735275] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.748252] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524b6470-49ba-f4e7-4a02-9ce001c4199f, 'name': SearchDatastore_Task, 'duration_secs': 0.01168} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.751397] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.751650] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.751893] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.753357] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.753357] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.753357] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6df7b7cd-6864-4f8d-bf18-6e605e7c2e3b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.757446] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] 
Acquiring lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.757701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.757852] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.758053] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.758255] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.760810] env[61905]: INFO nova.compute.manager [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Terminating instance [ 873.762559] env[61905]: DEBUG nova.compute.manager [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 873.762772] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.766140] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35640ed9-24bb-4611-8e83-bb880306099f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.769903] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.770129] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.773946] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-428197cd-6747-47fa-9f8a-24dbb58d5594 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.783102] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 873.783102] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52faa3a4-5da8-0c65-adea-4bd3ebd54de0" [ 873.783102] env[61905]: _type = "Task" [ 873.783102] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.788499] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.788806] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362549, 'name': PowerOnVM_Task, 'duration_secs': 0.611358} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.791958] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ff75e5f-dc09-4abe-86ab-40d141bf8c97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.793463] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.793679] env[61905]: INFO nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Took 10.44 seconds to spawn the instance on the hypervisor. [ 873.793859] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 873.794616] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91ca84f-70a2-43ed-ad05-364f6e309f3d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.806348] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52faa3a4-5da8-0c65-adea-4bd3ebd54de0, 'name': SearchDatastore_Task, 'duration_secs': 0.011417} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.809839] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 873.809839] env[61905]: value = "task-1362555" [ 873.809839] env[61905]: _type = "Task" [ 873.809839] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.812989] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca069426-a36f-4bba-b275-c3c9ec71a167 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.824511] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 873.824511] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525c303d-cf5a-47dc-acbd-8bd644f1dc4b" [ 873.824511] env[61905]: _type = "Task" [ 873.824511] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.828235] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.840134] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525c303d-cf5a-47dc-acbd-8bd644f1dc4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01162} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.840549] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.840832] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.841212] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c82f5419-bdf1-4fff-a4fb-5c46bb9d7eff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.849927] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 873.849927] env[61905]: value = "task-1362556" [ 873.849927] env[61905]: _type = "Task" [ 873.849927] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.862043] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362556, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.955201] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "111d10e8-7e36-48b6-be45-2275c36fbee4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.955690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.956115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.956523] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.956880] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.960495] env[61905]: INFO nova.compute.manager [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Terminating instance [ 873.965367] env[61905]: DEBUG nova.compute.manager [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 873.965367] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.966743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d0c816-e274-4a59-a72c-d852fc21384f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.978928] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.979341] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77c4091c-4116-43ae-8355-fb7e9a9c68b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.990714] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 873.990714] env[61905]: value = "task-1362557" [ 873.990714] env[61905]: _type = "Task" [ 873.990714] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.004207] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.038549] env[61905]: DEBUG nova.network.neutron [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.063482] env[61905]: DEBUG oslo_vmware.api [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Task: {'id': task-1362553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169527} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.064548] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.065272] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.065661] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.065744] env[61905]: INFO nova.compute.manager [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Took 1.16 seconds to destroy the instance on the hypervisor. [ 874.066097] env[61905]: DEBUG oslo.service.loopingcall [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.066650] env[61905]: DEBUG nova.compute.manager [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 874.066765] env[61905]: DEBUG nova.network.neutron [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.103186] env[61905]: DEBUG nova.compute.manager [req-4b38ffa2-e9dc-4d41-bb5d-a835f31782bc req-77ebbb30-d44d-4743-959a-af645cb818f7 service nova] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Received event network-vif-deleted-ba90cb84-a34a-4577-a80d-54b1b531778d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 874.109125] env[61905]: DEBUG nova.compute.utils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.110706] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 874.110867] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.113519] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Successfully updated port: 0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.179991] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.180288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.180467] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.180694] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.180907] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.186802] env[61905]: INFO nova.compute.manager [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Terminating instance [ 874.190282] env[61905]: DEBUG nova.compute.manager [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 
tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 874.190550] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.195531] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7a36fe-b8ef-428b-ac8d-f0b8f2158e6a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.211803] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.213776] env[61905]: DEBUG nova.policy [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 874.218457] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-343428ae-f392-4180-b3aa-c75b59b9b883 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.220611] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362554, 'name': Rename_Task, 'duration_secs': 0.228167} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.223929] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.224856] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d299bed-1563-4476-a1d2-8e1378267077 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.235062] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 874.235062] env[61905]: value = "task-1362558" [ 874.235062] env[61905]: _type = "Task" [ 874.235062] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.240045] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 874.240045] env[61905]: value = "task-1362559" [ 874.240045] env[61905]: _type = "Task" [ 874.240045] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.252687] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "e3b11ed6-b703-43a6-a528-28520ed43233" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.252934] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.253134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.253356] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.253516] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d 
tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.262407] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.263085] env[61905]: INFO nova.compute.manager [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Terminating instance [ 874.268538] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362559, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.269279] env[61905]: DEBUG nova.compute.manager [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 874.269511] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.270527] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c94aa1-9f75-4e78-8c10-c30c32a5ec42 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.284949] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.285402] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b9c01e7-62d9-4bc3-bb5b-b7abd7dd10ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.294380] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 874.294380] env[61905]: value = "task-1362560" [ 874.294380] env[61905]: _type = "Task" [ 874.294380] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.308581] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.330529] env[61905]: INFO nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Took 30.51 seconds to build instance. [ 874.336070] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362555, 'name': PowerOffVM_Task, 'duration_secs': 0.202921} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.336741] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.337063] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.337190] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16128861-c9f1-42ea-9f19-41ca9b0d5ee8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.376197] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362556, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.441038] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.441443] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.441695] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleting the datastore file [datastore2] 74f94a46-63e4-44e0-9142-7e7d46cd31a7 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.442310] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bc691b2-b23f-4d17-b286-3c1f4e7503be {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.454566] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 874.454566] env[61905]: value = "task-1362562" [ 874.454566] env[61905]: _type = "Task" [ 874.454566] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.467998] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.504816] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362557, 'name': PowerOffVM_Task, 'duration_secs': 0.29666} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.505110] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.505285] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.505621] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f6249d0-7839-4bd3-af34-dbbe8037bf7b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.542186] env[61905]: INFO nova.compute.manager [-] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Took 1.34 seconds to deallocate network for instance. [ 874.545776] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71f9b93-c2ee-4fd0-9b26-b2da29129bb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.558595] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28329aa-e680-4f8f-8675-d2f5d5061b4f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.597594] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bfeda0-6d43-43b9-8caf-62084ffd88e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.600757] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.600971] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.601175] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleting the datastore file [datastore2] 111d10e8-7e36-48b6-be45-2275c36fbee4 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.601970] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c5dbdc9-6dd3-440f-8e21-cd091eb4f4ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.612314] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b5882c-54df-4e1c-a211-24810441e406 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.616716] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for the task: (returnval){ [ 874.616716] env[61905]: value = "task-1362564" [ 874.616716] env[61905]: _type = "Task" [ 874.616716] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.616929] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 874.635261] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.635428] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquired lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.635582] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.637131] env[61905]: DEBUG nova.compute.provider_tree [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.646072] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362564, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.754398] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Successfully created port: 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.756543] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362558, 'name': PowerOffVM_Task, 'duration_secs': 0.269021} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.759616] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.759798] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.760399] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9bb1c09-c6fc-4cbe-abeb-055e84b4d033 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.768711] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362559, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.805207] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362560, 'name': PowerOffVM_Task, 'duration_secs': 0.27713} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.805551] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.805755] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.806080] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a76633c-0e2d-4754-b7a9-a00f328702e3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.837275] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.659s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.842641] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.842909] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.843218] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Deleting the datastore file [datastore1] 4bb7a2df-b472-4f6d-8a01-a55d0b86efda {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.843526] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a244568-cbc2-4d8a-a743-737c46733b27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.852711] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for the task: (returnval){ [ 874.852711] env[61905]: value = "task-1362567" [ 874.852711] env[61905]: _type = "Task" [ 874.852711] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.865651] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362556, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599904} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.870333] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.870333] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.870333] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.870333] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c457bb3e-a13c-482f-b338-dfe8da30e2b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.878183] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 874.878183] env[61905]: value = "task-1362568" [ 874.878183] env[61905]: _type = "Task" [ 874.878183] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.887582] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362568, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.906991] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.906991] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.906991] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleting the datastore file [datastore2] e3b11ed6-b703-43a6-a528-28520ed43233 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.907394] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cadb76a-97d9-4399-a14c-71f83952575a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.918838] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 874.918838] env[61905]: value = "task-1362569" [ 874.918838] env[61905]: _type = "Task" [ 874.918838] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.927767] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362569, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.967056] env[61905]: DEBUG oslo_vmware.api [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289475} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.967708] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.967708] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.967708] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.967917] env[61905]: INFO nova.compute.manager [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Took 1.21 seconds to destroy the instance on the hypervisor. [ 874.968193] env[61905]: DEBUG oslo.service.loopingcall [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.968317] env[61905]: DEBUG nova.compute.manager [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 874.968402] env[61905]: DEBUG nova.network.neutron [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.034171] env[61905]: DEBUG nova.network.neutron [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.057304] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.139567] env[61905]: DEBUG oslo_vmware.api [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Task: {'id': task-1362564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287809} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.146190] env[61905]: DEBUG nova.scheduler.client.report [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.149284] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.149476] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.149679] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.149857] env[61905]: INFO nova.compute.manager [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 875.153691] env[61905]: DEBUG oslo.service.loopingcall [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.155040] env[61905]: DEBUG nova.compute.manager [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.155145] env[61905]: DEBUG nova.network.neutron [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.210828] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.226963] env[61905]: DEBUG nova.compute.manager [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Received event network-vif-plugged-0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.227223] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Acquiring lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.227448] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.227626] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.227838] env[61905]: DEBUG nova.compute.manager [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] No waiting events found dispatching network-vif-plugged-0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 875.228048] env[61905]: WARNING nova.compute.manager [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Received unexpected event network-vif-plugged-0f772b14-7fff-42ea-9082-d9759393537d for instance with vm_state building and task_state spawning. [ 875.228222] env[61905]: DEBUG nova.compute.manager [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Received event network-changed-0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.228383] env[61905]: DEBUG nova.compute.manager [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Refreshing instance network info cache due to event network-changed-0f772b14-7fff-42ea-9082-d9759393537d. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 875.228550] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Acquiring lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.266857] env[61905]: DEBUG oslo_vmware.api [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362559, 'name': PowerOnVM_Task, 'duration_secs': 0.857351} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.269463] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.269726] env[61905]: INFO nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Took 9.27 seconds to spawn the instance on the hypervisor. [ 875.269973] env[61905]: DEBUG nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 875.270876] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6ebe45-8bf7-43f3-906f-36accd3a3a58 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.371325] env[61905]: DEBUG oslo_vmware.api [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Task: {'id': task-1362567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.259947} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.371555] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.371754] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.371940] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.372132] env[61905]: INFO nova.compute.manager [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Took 1.18 seconds to destroy the instance on the hypervisor. [ 875.372378] env[61905]: DEBUG oslo.service.loopingcall [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.372572] env[61905]: DEBUG nova.compute.manager [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.372664] env[61905]: DEBUG nova.network.neutron [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.389796] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093557} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.389796] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.389948] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c7f58f-ef49-4f89-9e71-bfd8dcda9203 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.414368] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.416117] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89d4262d-52d5-496b-abf6-7a17f07b46e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.445942] env[61905]: DEBUG oslo_vmware.api [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219424} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.447939] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.448222] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.448450] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.448664] env[61905]: INFO nova.compute.manager [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 875.448988] env[61905]: DEBUG oslo.service.loopingcall [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.449302] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 875.449302] env[61905]: value = "task-1362570" [ 875.449302] env[61905]: _type = "Task" [ 875.449302] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.449569] env[61905]: DEBUG nova.compute.manager [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 875.449714] env[61905]: DEBUG nova.network.neutron [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.461044] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362570, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.503089] env[61905]: DEBUG nova.network.neutron [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Updating instance_info_cache with network_info: [{"id": "0f772b14-7fff-42ea-9082-d9759393537d", "address": "fa:16:3e:eb:c1:1d", "network": {"id": "94a4d9c6-76fa-4254-94ec-89eb7f8b6a14", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1329214929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bba93d3fa8a4462825a606bd11032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f772b14-7f", "ovs_interfaceid": "0f772b14-7fff-42ea-9082-d9759393537d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.536871] env[61905]: INFO nova.compute.manager [-] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Took 1.47 seconds to deallocate network for instance. 
[ 875.627029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.627714] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.627714] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.627821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.627955] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.630673] env[61905]: INFO nova.compute.manager [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Terminating instance [ 875.636439] env[61905]: DEBUG nova.compute.manager [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 875.636439] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.637072] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f616b-4592-416c-81e0-06405624e591 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.646074] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 875.647830] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.648558] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e19e6df-18b8-4018-bf88-f57a72afd5ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.655227] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.658389] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.268s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.659868] env[61905]: INFO nova.compute.claims [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.662584] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 875.662584] env[61905]: value = "task-1362571" [ 875.662584] env[61905]: _type = "Task" [ 875.662584] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.676885] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.679543] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 875.679953] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 875.680140] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.680349] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 875.680752] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.680906] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 875.681148] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 875.681308] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 875.681473] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 875.681837] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 875.682069] env[61905]: DEBUG nova.virt.hardware [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 875.683134] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a463968-c75b-4faf-b6d3-67c4163a0915 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.687020] env[61905]: INFO nova.scheduler.client.report [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Deleted allocations for instance 60e68738-a333-44b2-a1e8-0b3da728059e [ 875.694366] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea352fa-b220-40f1-8dd0-0bbc3ede6c38 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.790907] env[61905]: INFO nova.compute.manager [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Took 31.46 seconds to build instance. [ 875.843207] env[61905]: DEBUG nova.network.neutron [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.962965] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362570, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.006701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Releasing lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.006701] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Instance network_info: |[{"id": "0f772b14-7fff-42ea-9082-d9759393537d", "address": "fa:16:3e:eb:c1:1d", "network": {"id": "94a4d9c6-76fa-4254-94ec-89eb7f8b6a14", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1329214929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bba93d3fa8a4462825a606bd11032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f772b14-7f", "ovs_interfaceid": "0f772b14-7fff-42ea-9082-d9759393537d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 876.007093] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Acquired lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.007286] env[61905]: DEBUG nova.network.neutron [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Refreshing network info cache for port 0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.008693] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:c1:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f772b14-7fff-42ea-9082-d9759393537d', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.017293] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f 
tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Creating folder: Project (8bba93d3fa8a4462825a606bd11032ca). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.020856] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b1c850a-9fed-4a3a-a758-12d5a2050c3e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.035476] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Created folder: Project (8bba93d3fa8a4462825a606bd11032ca) in parent group-v289968. [ 876.035476] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Creating folder: Instances. Parent ref: group-v290047. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.035759] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0761b656-825d-46c3-897b-e066c6c5b3a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.042613] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.048948] env[61905]: DEBUG nova.network.neutron [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.048948] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Created folder: Instances in parent group-v290047. [ 876.049110] env[61905]: DEBUG oslo.service.loopingcall [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.049317] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.049561] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d04ab482-c5e3-41fa-b8c0-20bef7893a63 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.068201] env[61905]: INFO nova.compute.manager [-] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Took 0.91 seconds to deallocate network for instance. 
[ 876.075962] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.075962] env[61905]: value = "task-1362574" [ 876.075962] env[61905]: _type = "Task" [ 876.075962] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.086915] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362574, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.178812] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Received event network-vif-deleted-e1eae77b-b6ce-4fab-9407-143d4f0555ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.179429] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Received event network-vif-deleted-6d6e4a71-550f-4400-af0a-c8e5178259bf {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.179709] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Received event network-vif-deleted-94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.179918] env[61905]: INFO nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Neutron deleted interface 94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00; detaching it from the instance and deleting it from the info cache [ 876.180138] env[61905]: DEBUG nova.network.neutron [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.186318] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362571, 'name': PowerOffVM_Task, 'duration_secs': 0.387924} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.188870] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.189121] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.189898] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b45785d7-5fc5-4030-bab6-641275661c5d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.197193] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a9843faf-c3c6-48a3-a11c-7747f3739a23 tempest-ListServerFiltersTestJSON-437651468 tempest-ListServerFiltersTestJSON-437651468-project-member] Lock "60e68738-a333-44b2-a1e8-0b3da728059e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.689s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.290875] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c3b2c177-6641-4e1f-9d00-7faf24ad451e tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.078s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.300241] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.300241] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.300241] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleting the datastore file [datastore2] a6e45dd1-e0ee-4bda-9513-4b1000e15e49 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.300241] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-797059f6-179d-4bd0-b682-e115bb176244 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.305219] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a 
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 876.305219] env[61905]: value = "task-1362576" [ 876.305219] env[61905]: _type = "Task" [ 876.305219] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.313804] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362576, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.340276] env[61905]: DEBUG nova.network.neutron [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Updated VIF entry in instance network info cache for port 0f772b14-7fff-42ea-9082-d9759393537d. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.340633] env[61905]: DEBUG nova.network.neutron [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Updating instance_info_cache with network_info: [{"id": "0f772b14-7fff-42ea-9082-d9759393537d", "address": "fa:16:3e:eb:c1:1d", "network": {"id": "94a4d9c6-76fa-4254-94ec-89eb7f8b6a14", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1329214929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8bba93d3fa8a4462825a606bd11032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f772b14-7f", "ovs_interfaceid": "0f772b14-7fff-42ea-9082-d9759393537d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.342554] env[61905]: DEBUG nova.network.neutron [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.345951] env[61905]: INFO nova.compute.manager [-] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Took 1.38 seconds to deallocate network for instance. 
[ 876.407493] env[61905]: DEBUG nova.network.neutron [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.447069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.447312] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.459029] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Successfully updated port: 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 876.466353] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362570, 'name': ReconfigVM_Task, 'duration_secs': 0.739127} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.466942] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.467639] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8f7a8fe-a6cd-49d8-ae70-92513fa34e4e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.474906] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 876.474906] env[61905]: value = "task-1362577" [ 876.474906] env[61905]: _type = "Task" [ 876.474906] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.482824] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362577, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.580411] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.590899] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362574, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.683283] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc40f4d2-d8ca-4c10-83c1-e19741e75ed7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.691904] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04d4e16-4305-4a2c-b773-2289ee0dc3ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.720259] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Detach interface failed, port_id=94d96dc3-3d3e-4f14-8f60-6b0bc5cc9a00, reason: Instance 4bb7a2df-b472-4f6d-8a01-a55d0b86efda could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 876.720518] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Received event network-vif-deleted-63b71759-5a81-4ed4-8ced-62e30a16037e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.720694] env[61905]: INFO nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Neutron deleted interface 63b71759-5a81-4ed4-8ced-62e30a16037e; detaching it from the instance and deleting it from the info cache [ 876.720868] env[61905]: DEBUG nova.network.neutron [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.820872] env[61905]: DEBUG oslo_vmware.api [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328705} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.825851] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.825851] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.825851] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.825851] env[61905]: INFO nova.compute.manager [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Took 1.19 seconds to destroy the instance on the hypervisor. [ 876.825851] env[61905]: DEBUG oslo.service.loopingcall [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.828926] env[61905]: DEBUG nova.compute.manager [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 876.829091] env[61905]: DEBUG nova.network.neutron [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.845298] env[61905]: INFO nova.compute.manager [-] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Took 1.47 seconds to deallocate network for instance. [ 876.846027] env[61905]: DEBUG oslo_concurrency.lockutils [req-55cec422-d8a8-491a-8ec8-9887271dd6b7 req-0a35e4f1-7428-49f2-b178-29234769ac07 service nova] Releasing lock "refresh_cache-299479fb-9a94-40b8-928d-8e491dbe1af1" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.852745] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.909930] env[61905]: INFO nova.compute.manager [-] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Took 1.46 seconds to deallocate network for instance. 
[ 876.950880] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 876.962743] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.962901] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.963080] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 876.993225] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362577, 'name': Rename_Task, 'duration_secs': 0.197316} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.993225] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.993581] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c3549a15-005a-4221-b231-bfe733120de1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.002345] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 877.002345] env[61905]: value = "task-1362578" [ 877.002345] env[61905]: _type = "Task" [ 877.002345] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.016979] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362578, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.039606] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5959cc9-f99e-4ef4-959e-4ffb0eabe630 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.048717] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fee669-0098-47d7-84b5-c01c4a751391 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.087885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515d870c-532e-4078-9f15-7b1d9850fa75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.100796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee03e33-12db-40ec-86b7-2941aeac195e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.104731] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362574, 'name': CreateVM_Task, 'duration_secs': 0.592777} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.104911] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.105972] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.106212] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.106550] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.106816] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f53862-2cb1-4e9c-b53b-03404565e892 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.116962] env[61905]: DEBUG nova.compute.provider_tree [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.122745] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 877.122745] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c0e28-607f-2a3e-cc1a-23888cdf192e" [ 877.122745] env[61905]: _type = "Task" [ 877.122745] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.132155] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c0e28-607f-2a3e-cc1a-23888cdf192e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.225435] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73942c38-2a2e-4506-a8f1-6ff45e2c4336 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.235622] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8eda508-09d0-4da1-b4b7-1d8123946875 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.262783] env[61905]: DEBUG nova.compute.manager [req-0d7005ed-3732-45f2-b494-4304929e7879 req-5c42265c-1afd-4813-b8cb-34aec5a7ff86 service nova] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Detach interface failed, port_id=63b71759-5a81-4ed4-8ced-62e30a16037e, reason: Instance e3b11ed6-b703-43a6-a528-28520ed43233 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 877.330544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "38b80675-182a-422c-9222-aa78ed59c351" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.330544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.330544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "38b80675-182a-422c-9222-aa78ed59c351-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.330544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.330544] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.332689] env[61905]: INFO nova.compute.manager [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Terminating instance [ 877.334717] env[61905]: DEBUG nova.compute.manager [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 877.334882] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.335840] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1de035-7465-47c8-ad45-1691fd372840 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.343454] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.343699] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b06e53c1-2c4c-41ab-b65a-33be689e2a49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.354374] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.354374] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 877.354374] env[61905]: value = "task-1362579" [ 877.354374] env[61905]: _type = "Task" [ 877.354374] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.362750] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362579, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.411424] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "2c919b69-0e09-431d-8a75-98d5740c7dab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.411424] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.411424] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.411729] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.413565] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.417181] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.417484] env[61905]: INFO nova.compute.manager [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Terminating instance [ 877.421341] env[61905]: DEBUG nova.compute.manager [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 877.421831] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.423074] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4479b77-c055-406b-9e66-7ec51a84bf21 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.434800] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.436353] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4f5f89b-ad17-4b86-a50b-2a860fc66295 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.443184] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 877.443184] env[61905]: value = "task-1362580" [ 877.443184] env[61905]: _type = "Task" [ 877.443184] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.452431] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362580, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.485685] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.497538] env[61905]: DEBUG nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Received event network-vif-deleted-f0c313e8-8429-4144-9f60-e93ba0ce30d1 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.497741] env[61905]: DEBUG nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-vif-plugged-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.497940] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.498175] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.498344] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.498509] env[61905]: DEBUG nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] No waiting events found dispatching network-vif-plugged-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 877.498673] env[61905]: WARNING nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received unexpected event network-vif-plugged-7c93f7f3-4702-4071-8e42-c0627b146af5 for instance with vm_state building and task_state spawning. 
[ 877.499082] env[61905]: DEBUG nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.499450] env[61905]: DEBUG nova.compute.manager [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 877.499644] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.513565] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362578, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.528916] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.620931] env[61905]: DEBUG nova.scheduler.client.report [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.634406] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c0e28-607f-2a3e-cc1a-23888cdf192e, 'name': SearchDatastore_Task, 'duration_secs': 0.016877} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.637314] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.637636] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.637964] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.638173] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.638390] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.639353] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66a87551-6475-41ce-8d61-62f83e79e44f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.650483] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.650633] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.651819] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c209191-5cc2-47b4-b70e-ec145ae846f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.657454] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 877.657454] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e901d9-52f7-ad85-b1c9-d17d8452b99c" [ 877.657454] env[61905]: _type = "Task" [ 877.657454] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.667368] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e901d9-52f7-ad85-b1c9-d17d8452b99c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.721344] env[61905]: DEBUG nova.network.neutron [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.864498] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362579, 'name': PowerOffVM_Task, 'duration_secs': 0.389879} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.864787] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.864989] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.865318] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5e0f287-fd57-465d-80a7-3cd23cdd43de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.881054] env[61905]: DEBUG nova.network.neutron [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.953911] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362580, 'name': PowerOffVM_Task, 'duration_secs': 0.332998} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.954311] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.954508] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.954802] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19b9effc-d64a-4ad5-bbc6-2eccbfbf9455 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.983642] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 877.983964] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 877.984310] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleting the datastore file [datastore2] 38b80675-182a-422c-9222-aa78ed59c351 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 877.984623] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2a55250-f4cf-41c9-bdae-08fdfb6e9748 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.992185] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 877.992185] env[61905]: value = "task-1362583" [ 877.992185] env[61905]: _type = "Task" [ 877.992185] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.002302] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362583, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.012817] env[61905]: DEBUG oslo_vmware.api [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362578, 'name': PowerOnVM_Task, 'duration_secs': 0.869789} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.013133] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 878.013362] env[61905]: INFO nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Took 9.51 seconds to spawn the instance on the hypervisor. 
[ 878.013611] env[61905]: DEBUG nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.014401] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd27670-ec2d-4a1e-ad4d-59195513c331 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.025673] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.025900] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.026130] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleting the datastore file [datastore2] 2c919b69-0e09-431d-8a75-98d5740c7dab {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.026613] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f22014a-ec3f-4d97-878c-5df95eaab741 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.033396] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 878.033396] env[61905]: value = "task-1362584" [ 878.033396] env[61905]: _type = "Task" [ 878.033396] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.041804] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362584, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.129991] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.130406] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.133225] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.669s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.133372] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.133449] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 878.133748] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.499s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.134622] env[61905]: DEBUG nova.objects.instance [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'resources' on Instance uuid a9ac365e-2be1-438d-a514-6fa7b26fa10c {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.138492] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54509c93-3c3b-4319-a059-585125b2dde9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.148534] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e96c8e1-180a-4b7f-a7e1-e24269816906 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.172324] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca5e236-20ba-4c3f-bb9a-a9334645566b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.184071] env[61905]: DEBUG 
oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e901d9-52f7-ad85-b1c9-d17d8452b99c, 'name': SearchDatastore_Task, 'duration_secs': 0.011737} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.186952] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46620ebc-6660-4b4f-89dc-1db422d86926 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.190237] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03bff61-6e84-4295-8bb2-dd7b2ca8c27e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.197365] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 878.197365] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52873547-1e41-cb36-3eed-99fde34dc360" [ 878.197365] env[61905]: _type = "Task" [ 878.197365] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.223371] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181042MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 878.223559] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.227077] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.227391] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Instance network_info: |[{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 878.227708] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.227888] env[61905]: DEBUG nova.network.neutron [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 878.229148] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:1d:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9c4edd5-d88e-4996-afea-00130ace0dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c93f7f3-4702-4071-8e42-c0627b146af5', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.237048] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Creating folder: Project (8cd0317a9e0e4f1d86c49a82e8ffbaa5). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.238119] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-742b7941-94fb-4f92-b358-8bed122b1401 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.246437] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52873547-1e41-cb36-3eed-99fde34dc360, 'name': SearchDatastore_Task, 'duration_secs': 0.013081} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.247195] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.247507] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 299479fb-9a94-40b8-928d-8e491dbe1af1/299479fb-9a94-40b8-928d-8e491dbe1af1.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.248044] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9de545c-394f-4a2d-862f-193b8e454137 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.258022] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 878.258022] env[61905]: value = "task-1362586" [ 878.258022] env[61905]: _type = "Task" [ 878.258022] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.258022] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Created folder: Project (8cd0317a9e0e4f1d86c49a82e8ffbaa5) in parent group-v289968. [ 878.258022] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Creating folder: Instances. Parent ref: group-v290050. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 878.259675] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f57d980-6f43-4614-87a1-0ee82a572f34 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.266439] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.267775] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Created folder: Instances in parent group-v290050. 
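The SearchDatastore_Task and CopyVirtualDisk_Task records above all follow one pattern: invoke an asynchronous vSphere method, receive a Task managed object back, then poll Task.info until it completes (the wait_for_task/_poll_task frames, with the "progress is N%" lines in between). A minimal sketch of driving that same loop with oslo.vmware, assuming a reachable vCenter; the host, credentials, and moref id are hypothetical placeholders, not values from this log:

    from oslo_vmware import api, vim_util

    # Session setup mirrors VMwareAPISession._create_session; task_poll_interval
    # is the delay between the "progress is N%" polls seen in these records.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',   # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)

    # Asynchronous vSphere calls return a Task moref rather than a result.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')  # hypothetical id
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Blocks, re-reading Task.info until success; raises if the task errors.
    session.wait_for_task(task)
    session.logout()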
[ 878.267998] env[61905]: DEBUG oslo.service.loopingcall [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.268198] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.268410] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-321739a6-6df2-4d49-af5d-d25272e35190 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.292234] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.292234] env[61905]: value = "task-1362588" [ 878.292234] env[61905]: _type = "Task" [ 878.292234] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.303455] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362588, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.384329] env[61905]: INFO nova.compute.manager [-] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Took 1.56 seconds to deallocate network for instance. [ 878.502973] env[61905]: DEBUG oslo_vmware.api [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18882} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.503847] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.504031] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.504302] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.504486] env[61905]: INFO nova.compute.manager [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Took 1.17 seconds to destroy the instance on the hypervisor. 
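The "Waiting for function ... to return" records (nova.virt.vmwareapi.vm_util.create_vm above, _deallocate_network_with_retries below) are emitted by oslo_service.loopingcall.RetryDecorator, which runs the wrapped callable inside a DynamicLoopingCall and re-invokes it with growing sleeps when one of its listed exception types is raised. A minimal sketch; the exception type and retry budget here are illustrative, not nova's actual values:

    from oslo_service import loopingcall

    class TransientError(Exception):
        """Hypothetical stand-in for a retryable failure."""

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=5,
                                exceptions=(TransientError,))
    def deallocate_network():
        # Each attempt runs inside a DynamicLoopingCall; raising
        # TransientError here schedules another attempt after a sleep.
        print('one deallocation attempt')

    deallocate_network()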
[ 878.504746] env[61905]: DEBUG oslo.service.loopingcall [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.504968] env[61905]: DEBUG nova.compute.manager [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 878.505062] env[61905]: DEBUG nova.network.neutron [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.538764] env[61905]: INFO nova.compute.manager [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Took 32.84 seconds to build instance. [ 878.546201] env[61905]: DEBUG oslo_vmware.api [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214754} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.546628] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.546628] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.546908] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.546999] env[61905]: INFO nova.compute.manager [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Took 1.13 seconds to destroy the instance on the hypervisor. [ 878.547283] env[61905]: DEBUG oslo.service.loopingcall [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.547486] env[61905]: DEBUG nova.compute.manager [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 878.547583] env[61905]: DEBUG nova.network.neutron [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.640942] env[61905]: DEBUG nova.compute.utils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.646086] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 878.646086] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 878.745744] env[61905]: DEBUG nova.policy [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eea5f65ce8b54d4e9eb726f58a4e39a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40c35a175eec4445817a2860c1f5770d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 878.774820] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362586, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.812295] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362588, 'name': CreateVM_Task, 'duration_secs': 0.381916} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.817478] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.818452] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.818627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.818979] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.819281] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e112a5-41ae-44de-947f-53822abaffcc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.829735] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 878.829735] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5297c61c-e775-150d-3e28-f7e1726bdbaf" [ 878.829735] env[61905]: _type = "Task" [ 878.829735] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.840320] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5297c61c-e775-150d-3e28-f7e1726bdbaf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.892622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.040486] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a66eca99-45c4-46cc-a701-9752466e2c8d tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.876s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.057956] env[61905]: DEBUG oslo_concurrency.lockutils [None req-02e128ef-07b2-421b-82ac-72a084553eb6 tempest-ServersListShow296Test-398257475 tempest-ServersListShow296Test-398257475-project-member] Acquiring lock "f3e56023-a8b0-4ad6-b157-708dcbcb2d8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.058266] env[61905]: DEBUG oslo_concurrency.lockutils [None req-02e128ef-07b2-421b-82ac-72a084553eb6 tempest-ServersListShow296Test-398257475 tempest-ServersListShow296Test-398257475-project-member] Lock "f3e56023-a8b0-4ad6-b157-708dcbcb2d8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.088450] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abd54c0-9f8d-4184-837c-8ff532e7f31e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.097303] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e06c299-c0bd-493c-b7bd-570815c0c37f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.133290] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1864e44-794a-4f65-9037-18f77c6bc7a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.141917] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588f91bd-e095-4591-97b7-680a4624a008 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.151725] env[61905]: DEBUG nova.compute.manager [req-bcc066be-5dd8-4ec4-9742-fa8f425ca8f6 req-d35c42c6-01ca-4845-9e8d-367623dbafa7 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Received event network-vif-deleted-49055aca-f39c-4b95-b186-b9007d03fde8 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.151922] env[61905]: INFO nova.compute.manager [req-bcc066be-5dd8-4ec4-9742-fa8f425ca8f6 req-d35c42c6-01ca-4845-9e8d-367623dbafa7 service nova] [instance: 
2c919b69-0e09-431d-8a75-98d5740c7dab] Neutron deleted interface 49055aca-f39c-4b95-b186-b9007d03fde8; detaching it from the instance and deleting it from the info cache [ 879.152117] env[61905]: DEBUG nova.network.neutron [req-bcc066be-5dd8-4ec4-9742-fa8f425ca8f6 req-d35c42c6-01ca-4845-9e8d-367623dbafa7 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.168515] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 879.172518] env[61905]: DEBUG nova.compute.provider_tree [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.174346] env[61905]: DEBUG nova.network.neutron [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 879.174346] env[61905]: DEBUG nova.network.neutron [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.175934] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-790a041c-2f23-4075-a605-197a8bcc59db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.185922] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f6c6a79-2f9f-4311-ac46-e5abb29e6282 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.213109] env[61905]: DEBUG nova.compute.manager [req-bcc066be-5dd8-4ec4-9742-fa8f425ca8f6 req-d35c42c6-01ca-4845-9e8d-367623dbafa7 service nova] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Detach interface failed, port_id=49055aca-f39c-4b95-b186-b9007d03fde8, reason: Instance 2c919b69-0e09-431d-8a75-98d5740c7dab could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 879.266234] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362586, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602308} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.266660] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 299479fb-9a94-40b8-928d-8e491dbe1af1/299479fb-9a94-40b8-928d-8e491dbe1af1.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.266902] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.267179] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1847cb6-1686-4b29-a56d-b5fe80313d67 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.274055] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 879.274055] env[61905]: value = "task-1362589" [ 879.274055] env[61905]: _type = "Task" [ 879.274055] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.282219] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362589, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.316380] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Successfully created port: a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.339830] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5297c61c-e775-150d-3e28-f7e1726bdbaf, 'name': SearchDatastore_Task, 'duration_secs': 0.018341} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.340154] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.340392] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.340629] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.340777] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.340955] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.341238] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b99b0d7f-853f-477b-9c38-14e6d1a4b2b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.349197] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.349350] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.350057] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa056eda-f78e-4dd0-849e-32a9102bb518 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.355127] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 879.355127] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d0494-14df-2e28-12f3-9224b416400d" [ 879.355127] env[61905]: _type = "Task" [ 879.355127] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.362154] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d0494-14df-2e28-12f3-9224b416400d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.522743] env[61905]: DEBUG nova.network.neutron [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.529820] env[61905]: DEBUG nova.compute.manager [req-73615a73-e3f4-4bd2-b28c-ed62584084eb req-d33dae65-53fc-453e-bc86-bea8c993cd7a service nova] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Received event network-vif-deleted-91bfd878-6423-4f5a-9645-1fb1d70de825 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.530173] env[61905]: DEBUG nova.compute.manager [req-73615a73-e3f4-4bd2-b28c-ed62584084eb req-d33dae65-53fc-453e-bc86-bea8c993cd7a service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Received event network-vif-deleted-76047de4-e7fa-4434-9ef3-4dcefcdf9f3f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 879.530262] env[61905]: INFO nova.compute.manager [req-73615a73-e3f4-4bd2-b28c-ed62584084eb req-d33dae65-53fc-453e-bc86-bea8c993cd7a service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Neutron deleted interface 76047de4-e7fa-4434-9ef3-4dcefcdf9f3f; detaching it from the instance and deleting it from the info cache [ 879.530469] env[61905]: DEBUG nova.network.neutron [req-73615a73-e3f4-4bd2-b28c-ed62584084eb req-d33dae65-53fc-453e-bc86-bea8c993cd7a service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.533950] env[61905]: DEBUG nova.network.neutron [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 879.567023] env[61905]: DEBUG nova.compute.manager [None req-02e128ef-07b2-421b-82ac-72a084553eb6 tempest-ServersListShow296Test-398257475 tempest-ServersListShow296Test-398257475-project-member] [instance: f3e56023-a8b0-4ad6-b157-708dcbcb2d8b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 879.681897] env[61905]: DEBUG oslo_concurrency.lockutils [req-be2c6fc1-4766-4493-a857-58581db6c602 req-78f6ed27-0fb0-41cd-bd00-ff90354558de service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.683056] env[61905]: DEBUG nova.scheduler.client.report [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.787385] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362589, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069148} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.788127] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.789121] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca20e850-a8dc-4e1f-bdda-0f0a06f959a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.820067] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 299479fb-9a94-40b8-928d-8e491dbe1af1/299479fb-9a94-40b8-928d-8e491dbe1af1.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.820455] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acd529f6-3331-41c5-8d99-10eaeae79b74 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.841819] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 879.841819] env[61905]: value = "task-1362590" [ 879.841819] env[61905]: _type = "Task" [ 879.841819] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.855441] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362590, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.865850] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d0494-14df-2e28-12f3-9224b416400d, 'name': SearchDatastore_Task, 'duration_secs': 0.008734} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.866769] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3523e0ee-663f-4491-90bf-dbf13fd7f6b2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.872500] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 879.872500] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5295fc59-87bd-bec5-fe5b-89d7e424b93f" [ 879.872500] env[61905]: _type = "Task" [ 879.872500] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.882468] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5295fc59-87bd-bec5-fe5b-89d7e424b93f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.025789] env[61905]: INFO nova.compute.manager [-] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Took 1.52 seconds to deallocate network for instance. [ 880.033157] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e7b7b99-8041-435b-99f9-3d62499f4075 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.035257] env[61905]: INFO nova.compute.manager [-] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Took 1.49 seconds to deallocate network for instance. [ 880.046439] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0a15e8-8c7a-44d7-9d57-3062389ab604 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.068600] env[61905]: DEBUG nova.compute.manager [None req-02e128ef-07b2-421b-82ac-72a084553eb6 tempest-ServersListShow296Test-398257475 tempest-ServersListShow296Test-398257475-project-member] [instance: f3e56023-a8b0-4ad6-b157-708dcbcb2d8b] Instance disappeared before build. {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.076548] env[61905]: DEBUG nova.compute.manager [req-73615a73-e3f4-4bd2-b28c-ed62584084eb req-d33dae65-53fc-453e-bc86-bea8c993cd7a service nova] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Detach interface failed, port_id=76047de4-e7fa-4434-9ef3-4dcefcdf9f3f, reason: Instance 38b80675-182a-422c-9222-aa78ed59c351 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 880.185098] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.190947] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.190947] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.826s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.192572] env[61905]: INFO nova.compute.claims [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.218127] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.218127] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.218127] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.218653] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.219049] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
880.219342] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.219731] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.220392] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.220792] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.221224] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.221802] env[61905]: DEBUG nova.virt.hardware [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.223188] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65209341-4d3f-4806-ae14-5fe5682a8bfb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.227595] env[61905]: INFO nova.scheduler.client.report [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance a9ac365e-2be1-438d-a514-6fa7b26fa10c [ 880.243388] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b676a-076b-4b8f-a22b-25dbd5941f8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.354859] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362590, 'name': ReconfigVM_Task, 'duration_secs': 0.301595} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.355287] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 299479fb-9a94-40b8-928d-8e491dbe1af1/299479fb-9a94-40b8-928d-8e491dbe1af1.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.356147] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9aeb6dc2-494a-45dd-845d-f2ec7b863be9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.363822] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 880.363822] env[61905]: value = "task-1362591" [ 880.363822] env[61905]: _type = "Task" [ 880.363822] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.375230] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362591, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.384909] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5295fc59-87bd-bec5-fe5b-89d7e424b93f, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.385243] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.385390] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 0f7ccb34-cb14-4b21-ae61-b066427d400e/0f7ccb34-cb14-4b21-ae61-b066427d400e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.385648] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-253483f8-ac0e-4d06-960d-0f46d89497cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.392518] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 880.392518] env[61905]: value = "task-1362592" [ 880.392518] env[61905]: _type = "Task" [ 880.392518] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.400169] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362592, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.533405] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.544610] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.589017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-02e128ef-07b2-421b-82ac-72a084553eb6 tempest-ServersListShow296Test-398257475 tempest-ServersListShow296Test-398257475-project-member] Lock "f3e56023-a8b0-4ad6-b157-708dcbcb2d8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.531s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.749026] env[61905]: DEBUG oslo_concurrency.lockutils [None req-13d114fc-3d7b-427d-87ee-4a4aa5ea5640 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "a9ac365e-2be1-438d-a514-6fa7b26fa10c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.996s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.874426] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362591, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.903063] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.211143] env[61905]: DEBUG nova.compute.manager [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 881.211143] env[61905]: DEBUG nova.compute.manager [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing instance network info cache due to event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 881.211143] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.211143] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.211143] env[61905]: DEBUG nova.network.neutron [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.214801] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Successfully updated port: a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.374607] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362591, 'name': Rename_Task, 'duration_secs': 0.815435} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.377278] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 881.378458] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84bf234d-4cf2-4981-84e8-4bd33338bf28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.384664] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 881.384664] env[61905]: value = "task-1362593" [ 881.384664] env[61905]: _type = "Task" [ 881.384664] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.392648] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362593, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.402848] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.471730] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf7adfa-82e6-4763-a094-548a6bf11cce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.489016] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97858672-6f72-4509-88e5-3a352affe038 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.524618] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec896163-e4d9-459d-9392-7ebf579a5351 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.534776] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca5cd6e-0622-4aab-a755-24af1d6efeb1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.548069] env[61905]: DEBUG nova.compute.provider_tree [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.716285] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.716558] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.716653] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.898074] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362593, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.911076] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362592, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.984769] env[61905]: DEBUG nova.network.neutron [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updated VIF entry in instance network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 881.985204] env[61905]: DEBUG nova.network.neutron [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.052458] env[61905]: DEBUG nova.scheduler.client.report [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.084069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.084313] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.260114] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.395932] env[61905]: DEBUG oslo_vmware.api [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362593, 'name': PowerOnVM_Task, 'duration_secs': 0.855725} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.396254] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.396462] env[61905]: INFO nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Took 9.38 seconds to spawn the instance on the hypervisor. [ 882.396646] env[61905]: DEBUG nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 882.397602] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5941b6d-cd07-4ab1-9c71-25cf8a00f129 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.419626] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362592, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.578512} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.420120] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 0f7ccb34-cb14-4b21-ae61-b066427d400e/0f7ccb34-cb14-4b21-ae61-b066427d400e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 882.420339] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 882.420594] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca95947b-0d94-4554-8eeb-91fb0e9b62e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.431430] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 882.431430] env[61905]: value = "task-1362594" [ 882.431430] env[61905]: _type = "Task" [ 882.431430] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.440698] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362594, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.488660] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.489063] env[61905]: DEBUG nova.compute.manager [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Received event network-vif-plugged-a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.489297] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Acquiring lock "e9e00459-e685-431b-b194-cf426c7a743e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.489517] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Lock "e9e00459-e685-431b-b194-cf426c7a743e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.489763] env[61905]: DEBUG oslo_concurrency.lockutils [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] Lock "e9e00459-e685-431b-b194-cf426c7a743e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.489901] env[61905]: DEBUG nova.compute.manager [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] No waiting events found dispatching network-vif-plugged-a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.490111] env[61905]: WARNING nova.compute.manager [req-028d412c-693c-403d-afea-ca9c2ca9b73c req-609fbf23-da7b-4b71-b279-4c9a53c08cc6 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Received unexpected event network-vif-plugged-a4b0c26b-f66a-462b-8be3-1f8271de80e6 for instance with vm_state building and task_state spawning. [ 882.556721] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.557770] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 882.565518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.902s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.567541] env[61905]: INFO nova.compute.claims [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.586314] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 882.612617] env[61905]: DEBUG nova.network.neutron [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [{"id": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "address": "fa:16:3e:db:4b:19", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b0c26b-f6", "ovs_interfaceid": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.930439] env[61905]: INFO nova.compute.manager [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Took 32.13 seconds to build instance. [ 882.940670] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06024} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.940882] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.941669] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ed3807-e819-42b5-a772-018521f1477b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.963832] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 0f7ccb34-cb14-4b21-ae61-b066427d400e/0f7ccb34-cb14-4b21-ae61-b066427d400e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.964417] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e0f11b9-1f27-4351-8350-f0d7645791d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.984402] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 882.984402] env[61905]: value = "task-1362595" [ 882.984402] env[61905]: _type = "Task" [ 882.984402] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.992399] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.073529] env[61905]: DEBUG nova.compute.utils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.075081] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Allocating IP information in the background. 
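'Allocating IP information in the background' above marks the point where port allocation is handed to a background thread while block-device mappings are built in parallel (the 'Start building block device mappings' line follows shortly after). A hedged analogy of that fork/join shape using concurrent.futures — Nova itself runs on eventlet, and allocate_for_instance / build_block_device_mappings here are stand-in callables, not the real signatures:

# Analogy for the fork/join in _allocate_network_async: start Neutron port
# allocation in the background, prepare disks meanwhile, join before spawn.
from concurrent.futures import ThreadPoolExecutor

def build_resources(instance_id, allocate_for_instance, build_block_device_mappings):
    with ThreadPoolExecutor(max_workers=1) as pool:
        # Kicked off first so Neutron works while we prepare block devices.
        nw_future = pool.submit(allocate_for_instance, instance_id)
        bdms = build_block_device_mappings(instance_id)
        network_info = nw_future.result()  # re-raises allocation errors here
    return network_info, bdms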
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 883.075275] env[61905]: DEBUG nova.network.neutron [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.110176] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.118550] env[61905]: DEBUG nova.policy [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56344065ce7d4c8c908b6d77516f998d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c6a827d04af4d979146be16cd3517db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 883.120571] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.120852] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Instance network_info: |[{"id": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "address": "fa:16:3e:db:4b:19", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b0c26b-f6", "ovs_interfaceid": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 883.121493] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None 
req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:4b:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '778b9a40-d603-4765-ac88-bd6d42c457a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4b0c26b-f66a-462b-8be3-1f8271de80e6', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.129020] env[61905]: DEBUG oslo.service.loopingcall [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.129364] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.129602] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48b59aec-a475-4064-a6d0-b04b9626f522 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.151427] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.151427] env[61905]: value = "task-1362596" [ 883.151427] env[61905]: _type = "Task" [ 883.151427] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.159462] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362596, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.239038] env[61905]: DEBUG nova.compute.manager [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Received event network-changed-a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.239231] env[61905]: DEBUG nova.compute.manager [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Refreshing instance network info cache due to event network-changed-a4b0c26b-f66a-462b-8be3-1f8271de80e6. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 883.239710] env[61905]: DEBUG oslo_concurrency.lockutils [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] Acquiring lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.239837] env[61905]: DEBUG oslo_concurrency.lockutils [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] Acquired lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.240148] env[61905]: DEBUG nova.network.neutron [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Refreshing network info cache for port a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.416549] env[61905]: DEBUG nova.network.neutron [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Successfully created port: c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.432333] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9c964f7d-c3a9-4a30-8970-ee495e85a73f tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.928s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.494885] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362595, 'name': ReconfigVM_Task, 'duration_secs': 0.304859} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.495242] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 0f7ccb34-cb14-4b21-ae61-b066427d400e/0f7ccb34-cb14-4b21-ae61-b066427d400e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.495886] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-406a9779-2744-4ae1-a083-0b0f7e4107b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.502431] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 883.502431] env[61905]: value = "task-1362597" [ 883.502431] env[61905]: _type = "Task" [ 883.502431] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.509804] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362597, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.579931] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 883.663878] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362596, 'name': CreateVM_Task, 'duration_secs': 0.319226} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.663878] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 883.663878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.663878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.663878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.663878] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70741e78-a17f-41bd-bad0-9ecc11f5ccf6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.671522] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 883.671522] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0e34c-ef8d-1fbc-791e-d63f64b49814" [ 883.671522] env[61905]: _type = "Task" [ 883.671522] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.679637] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0e34c-ef8d-1fbc-791e-d63f64b49814, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.905072] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483a38c7-ad09-4af9-bd29-f1a4cc1a2ccc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.913438] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d8eb5e-1820-48b0-a74f-0bfaad716262 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.948308] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5cc259-3a73-406e-afe1-e9c563b71e88 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.960445] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e47bc76-4a04-4e2b-b085-fd251d4d5cc3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.976162] env[61905]: DEBUG nova.compute.provider_tree [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.007378] env[61905]: DEBUG nova.network.neutron [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updated VIF entry in instance network info cache for port a4b0c26b-f66a-462b-8be3-1f8271de80e6. 
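The sequence above — 'Received event network-changed-<port>', acquire lock "refresh_cache-<uuid>", re-query Neutron, 'Updated VIF entry in instance network info cache' — is the event-driven cache refresh. A minimal sketch of that update-or-append step, assuming a hypothetical neutron_get_port helper and an in-process dict in place of Nova's instance_info_cache:

# Hedged sketch of the event-driven VIF cache refresh pattern in the log.
import threading

_nw_cache: dict[str, list[dict]] = {}   # instance uuid -> list of VIF dicts
_cache_lock = threading.Lock()

def handle_network_changed(instance_uuid: str, port_id: str, neutron_get_port):
    fresh = neutron_get_port(port_id)   # re-query Neutron for current state
    with _cache_lock:                   # like lock "refresh_cache-<uuid>"
        vifs = _nw_cache.setdefault(instance_uuid, [])
        for i, vif in enumerate(vifs):
            if vif["id"] == port_id:
                vifs[i] = fresh         # "Updated VIF entry ... for port ..."
                break
        else:
            vifs.append(fresh)          # port not cached yet: add it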
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.007897] env[61905]: DEBUG nova.network.neutron [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [{"id": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "address": "fa:16:3e:db:4b:19", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b0c26b-f6", "ovs_interfaceid": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.014238] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362597, 'name': Rename_Task, 'duration_secs': 0.132752} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.014502] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.014753] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e99fc1ba-dd3d-46bf-8370-2ac0ff22cf68 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.021517] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 884.021517] env[61905]: value = "task-1362598" [ 884.021517] env[61905]: _type = "Task" [ 884.021517] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.033371] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362598, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.182715] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e0e34c-ef8d-1fbc-791e-d63f64b49814, 'name': SearchDatastore_Task, 'duration_secs': 0.036982} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.183048] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.183290] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.183524] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.183671] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.183847] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.184125] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f024067-f32c-4184-9034-8b64599f1df9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.192507] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.192655] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Folder [datastore1] devstack-image-cache_base created. 
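The lock/search/mkdir dance above around [datastore1] devstack-image-cache_base is a copy-once image cache: the base VMDK is fetched at most once per image id, under a per-image lock, and directory creation is idempotent so concurrent spawns of the same image can race safely. A sketch of that shape with local paths, where download_image and the path layout are assumptions standing in for the datastore operations:

# Sketch of fetch-image-if-missing idempotency: cache miss -> download once.
import threading
from pathlib import Path

_image_locks: dict[str, threading.Lock] = {}
_guard = threading.Lock()

def ensure_cached_image(cache_root: Path, image_id: str, download_image) -> Path:
    cache_root.mkdir(parents=True, exist_ok=True)  # "Created directory" is safe to repeat
    with _guard:
        lock = _image_locks.setdefault(image_id, threading.Lock())
    target = cache_root / image_id / f"{image_id}.vmdk"
    with lock:                        # serialize concurrent spawns of the same image
        if not target.exists():       # "Processing image" work happens only on a miss
            target.parent.mkdir(exist_ok=True)
            download_image(image_id, target)
    return target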
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.193406] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fafe439a-4f96-464b-8099-c6b0e51ba948 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.198491] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 884.198491] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52adb776-81af-8064-b76d-2cea74331c57" [ 884.198491] env[61905]: _type = "Task" [ 884.198491] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.206036] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52adb776-81af-8064-b76d-2cea74331c57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.388062] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "299479fb-9a94-40b8-928d-8e491dbe1af1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.388378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.388550] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.388735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.388985] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.391322] env[61905]: INFO nova.compute.manager [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Terminating instance [ 884.393317] env[61905]: DEBUG nova.compute.manager [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.393513] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.394352] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793024bc-7140-40a1-8d38-1224cc9e1cf3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.403457] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.403721] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0f18564-1699-481d-b55f-e4fe54df9506 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.411374] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: (returnval){ [ 884.411374] env[61905]: value = "task-1362599" [ 884.411374] env[61905]: _type = "Task" [ 884.411374] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.419202] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362599, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.482563] env[61905]: DEBUG nova.scheduler.client.report [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.511266] env[61905]: DEBUG oslo_concurrency.lockutils [req-3561c2c8-e812-4f4a-bbdd-14d192aa307c req-afdf494d-eff4-4679-a1c1-f3561bab4d13 service nova] Releasing lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.532509] env[61905]: DEBUG oslo_vmware.api [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362598, 'name': PowerOnVM_Task, 'duration_secs': 0.463782} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.532829] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.533096] env[61905]: INFO nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Took 8.89 seconds to spawn the instance on the hypervisor. [ 884.533336] env[61905]: DEBUG nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.534316] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9ee0c5-eb5a-4073-8627-a7b0bf16f42a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.594801] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Start spawning the instance on the hypervisor. 
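'Inventory has not changed for provider ... based on inventory data: {...}' a few lines above is the skip branch of the resource tracker's periodic report: the freshly computed inventory is compared against what the ProviderTree already holds, and Placement is only written on a difference. In sketch form, with put_inventory as a hypothetical stand-in for the Placement API call:

# Only push inventory to Placement when it actually differs from the cache.
def set_inventory_if_changed(provider_uuid, cached: dict, reported: dict, put_inventory):
    if cached == reported:  # dict equality covers nested fields like allocation_ratio
        print(f"Inventory has not changed for provider {provider_uuid}")
        return cached
    put_inventory(provider_uuid, reported)
    return reported

# e.g. reported = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, ...}, ...}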
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 884.624157] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.624421] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.624598] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.624806] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.624957] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.625121] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.625392] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.625478] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 884.625642] env[61905]: DEBUG 
nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.625801] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.625995] env[61905]: DEBUG nova.virt.hardware [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 884.626945] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d45fe00-64c2-49e0-bafc-280c6db29cc1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.634898] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304650dc-12e0-43d6-9d3b-5142373871de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.708801] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52adb776-81af-8064-b76d-2cea74331c57, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.710016] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43527920-3b09-4e83-9879-26faf0e1bdcb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.715752] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 884.715752] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5239f282-b098-f063-5bc3-47995f642676" [ 884.715752] env[61905]: _type = "Task" [ 884.715752] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.723478] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5239f282-b098-f063-5bc3-47995f642676, 'name': SearchDatastore_Task} progress is 0%. 
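The VirtCPUTopology lines above enumerate every (sockets, cores, threads) factorisation of the flavor's vCPU count that fits the limits; with 1 vCPU and 65536-wide limits the only candidate is 1:1:1, hence 'Got 1 possible topologies'. A small function that mirrors the shape of that enumeration (not nova.virt.hardware's exact code):

# Enumerate (sockets, cores, threads) triples whose product equals vcpus
# and that respect the per-dimension limits, by walking divisors.
def possible_cpu_topologies(vcpus: int, max_sockets: int, max_cores: int, max_threads: int):
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        rest = vcpus // s
        for c in range(1, min(rest, max_cores) + 1):
            if rest % c:
                continue
            t = rest // c
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_cpu_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]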
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.921866] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362599, 'name': PowerOffVM_Task, 'duration_secs': 0.177528} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.922251] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.922426] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.922681] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4628e094-964c-4cf6-a9cd-9027a775c690 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.952763] env[61905]: DEBUG nova.network.neutron [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Successfully updated port: c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.977940] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.978215] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.978405] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Deleting the datastore file [datastore1] 299479fb-9a94-40b8-928d-8e491dbe1af1 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.978662] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44402288-d752-4df1-8273-d88cc42e3eeb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.984689] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for the task: 
(returnval){ [ 884.984689] env[61905]: value = "task-1362601" [ 884.984689] env[61905]: _type = "Task" [ 884.984689] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.990049] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.990495] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 884.992940] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.592s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.993168] env[61905]: DEBUG nova.objects.instance [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lazy-loading 'resources' on Instance uuid 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.998785] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362601, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.051455] env[61905]: INFO nova.compute.manager [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Took 32.67 seconds to build instance. [ 885.226631] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5239f282-b098-f063-5bc3-47995f642676, 'name': SearchDatastore_Task, 'duration_secs': 0.009275} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.226978] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.227255] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/e9e00459-e685-431b-b194-cf426c7a743e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.227541] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-958c4bae-da8d-49f2-a3ef-3b414e8ad7fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.234835] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 885.234835] env[61905]: value = "task-1362602" [ 885.234835] env[61905]: _type = "Task" [ 885.234835] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.243206] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362602, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.264316] env[61905]: DEBUG nova.compute.manager [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Received event network-vif-plugged-c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.264639] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Acquiring lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.264742] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.264947] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.265076] env[61905]: DEBUG nova.compute.manager [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] No waiting events found dispatching network-vif-plugged-c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 885.265248] env[61905]: WARNING nova.compute.manager [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Received unexpected event network-vif-plugged-c2766fd3-662b-45c8-b9c8-765f49bd1701 for instance with vm_state building and task_state spawning. [ 885.265409] env[61905]: DEBUG nova.compute.manager [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Received event network-changed-c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.265566] env[61905]: DEBUG nova.compute.manager [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Refreshing instance network info cache due to event network-changed-c2766fd3-662b-45c8-b9c8-765f49bd1701. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 885.265748] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Acquiring lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.265901] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Acquired lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.266106] env[61905]: DEBUG nova.network.neutron [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Refreshing network info cache for port c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 885.455412] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.500511] env[61905]: DEBUG nova.compute.utils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 885.510985] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 885.510985] env[61905]: DEBUG oslo_vmware.api [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Task: {'id': task-1362601, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133138} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.510985] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.510985] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.510985] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.510985] env[61905]: INFO nova.compute.manager [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 885.510985] env[61905]: DEBUG oslo.service.loopingcall [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.510985] env[61905]: DEBUG nova.compute.manager [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 885.510985] env[61905]: DEBUG nova.network.neutron [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.556836] env[61905]: DEBUG oslo_concurrency.lockutils [None req-22beff27-fb9b-4b5a-a7a8-b0cab6d3c0c5 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.567s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.753889] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362602, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.811536] env[61905]: DEBUG nova.network.neutron [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.868580] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec469ca-0936-4dbc-978c-7254699db174 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.878426] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7172298f-0dcf-4e8e-bf0b-31ac4822dd0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.921497] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d0428d-f318-4799-a900-22ebc69d86fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.927386] env[61905]: DEBUG nova.network.neutron [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.930015] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7cd59f-d8d6-4fd1-aee9-3f016a86882e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.944734] env[61905]: DEBUG nova.compute.provider_tree [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 886.010172] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 886.249603] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537314} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.249779] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/e9e00459-e685-431b-b194-cf426c7a743e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.250031] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.250295] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74fed82d-2263-4054-8614-dfd9e14005e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.256700] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 886.256700] env[61905]: value = "task-1362603" [ 886.256700] env[61905]: _type = "Task" [ 886.256700] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.938790] env[61905]: DEBUG nova.network.neutron [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.940117] env[61905]: DEBUG oslo_concurrency.lockutils [req-098137b4-dabe-4682-99e2-d27954da38f2 req-81f48a59-723d-4767-8a5a-7064e61a1b20 service nova] Releasing lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.940947] env[61905]: DEBUG nova.scheduler.client.report [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.949759] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquired lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.949920] env[61905]: DEBUG nova.network.neutron [None 
req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.951303] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.951552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.956049] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362603, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080839} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.956305] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 886.957109] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6707a77-8ef5-472c-a70c-fde934112bbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.980517] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/e9e00459-e685-431b-b194-cf426c7a743e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.981133] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a865bd46-33a1-4094-a9ac-6f944af6d51d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.002345] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 887.002345] env[61905]: value = "task-1362604" [ 887.002345] env[61905]: _type = "Task" [ 887.002345] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.011195] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362604, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.291382] env[61905]: DEBUG nova.compute.manager [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 887.291573] env[61905]: DEBUG nova.compute.manager [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 887.291790] env[61905]: DEBUG oslo_concurrency.lockutils [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.291938] env[61905]: DEBUG oslo_concurrency.lockutils [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.292117] env[61905]: DEBUG nova.network.neutron [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.450515] env[61905]: INFO nova.compute.manager [-] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Took 1.94 seconds to deallocate network for instance. [ 887.451403] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.458s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.454149] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 887.457745] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.895s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.459309] env[61905]: INFO nova.compute.claims [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.463982] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 887.480374] env[61905]: INFO nova.scheduler.client.report [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Deleted allocations for instance 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e [ 887.483374] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.483667] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.483742] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.483933] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.484099] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 
tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.484939] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.484939] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.484939] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.485594] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.485594] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.485594] env[61905]: DEBUG nova.virt.hardware [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.489656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bc436f-7823-413e-bb22-3573e0ac83f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.497889] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1342fe-4d01-49cd-96d5-cbb03fa8f34f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.502781] env[61905]: DEBUG nova.network.neutron [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.517733] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.523229] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Creating folder: Project (6a8eaf52ab6b4282953d6cd8d22dd83f). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.527532] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b2d1e66-b4f8-429f-a318-a8f2e97b834d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.532592] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362604, 'name': ReconfigVM_Task, 'duration_secs': 0.357884} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.533173] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfigured VM instance instance-0000004e to attach disk [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/e9e00459-e685-431b-b194-cf426c7a743e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.533818] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79fe2ef5-19c2-4ee3-b5fa-65d4ab1aedac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.541963] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 887.541963] env[61905]: value = "task-1362606" [ 887.541963] env[61905]: _type = "Task" [ 887.541963] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.542201] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Created folder: Project (6a8eaf52ab6b4282953d6cd8d22dd83f) in parent group-v289968. [ 887.542370] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Creating folder: Instances. Parent ref: group-v290054. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.542659] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa4cb3db-0656-4077-8e54-bfad22cedb49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.552687] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362606, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.561793] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Created folder: Instances in parent group-v290054. [ 887.562097] env[61905]: DEBUG oslo.service.loopingcall [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.562620] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.562890] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b599531b-951b-4e86-bdb8-a4f55000109d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.580254] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.580254] env[61905]: value = "task-1362608" [ 887.580254] env[61905]: _type = "Task" [ 887.580254] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.591186] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362608, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.692836] env[61905]: DEBUG nova.network.neutron [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Updating instance_info_cache with network_info: [{"id": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "address": "fa:16:3e:2d:35:1e", "network": {"id": "f09e2fca-89a4-4442-b047-5eaf65657fea", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1606164832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c6a827d04af4d979146be16cd3517db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2766fd3-66", "ovs_interfaceid": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.976351] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.990392] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.995810] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7265f4af-6b62-4f1d-a4cf-4ad4b16f89e9 tempest-ServersNegativeTestMultiTenantJSON-1026996899 tempest-ServersNegativeTestMultiTenantJSON-1026996899-project-member] Lock "020f97b7-e3e4-44e1-9ad2-97e3ed671f7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.679s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.052456] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362606, 'name': Rename_Task, 'duration_secs': 0.147462} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.052727] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.052969] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1cd73ac-89e0-4a3b-8956-3191251013c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.059468] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 888.059468] env[61905]: value = "task-1362609" [ 888.059468] env[61905]: _type = "Task" [ 888.059468] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.066954] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.072271] env[61905]: DEBUG nova.network.neutron [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.072623] env[61905]: DEBUG nova.network.neutron [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.090526] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362608, 'name': CreateVM_Task, 'duration_secs': 0.321204} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.090681] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.091115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.091318] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.091646] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 888.091897] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c499c70-ee95-441e-ad26-6fc2360883a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.096224] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 888.096224] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5263bdc8-8902-391c-00aa-04e4847319b1" [ 888.096224] env[61905]: _type = "Task" [ 888.096224] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.104410] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5263bdc8-8902-391c-00aa-04e4847319b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.196082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Releasing lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.196447] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance network_info: |[{"id": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "address": "fa:16:3e:2d:35:1e", "network": {"id": "f09e2fca-89a4-4442-b047-5eaf65657fea", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1606164832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c6a827d04af4d979146be16cd3517db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2766fd3-66", "ovs_interfaceid": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 888.197011] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:35:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2766fd3-662b-45c8-b9c8-765f49bd1701', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.204503] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Creating folder: Project (8c6a827d04af4d979146be16cd3517db). Parent ref: group-v289968. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.204811] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d342437-76db-40bf-bafb-e8c91603eb33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.216574] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Created folder: Project (8c6a827d04af4d979146be16cd3517db) in parent group-v289968. [ 888.216763] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Creating folder: Instances. Parent ref: group-v290057. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 888.217024] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2f26e78-c6df-4ce9-b3f0-7584cd1feb99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.225870] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Created folder: Instances in parent group-v290057. [ 888.226117] env[61905]: DEBUG oslo.service.loopingcall [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.226308] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.226505] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a8ee9783-1bca-4721-a3f4-e2f9cd4da4d2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.246222] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.246222] env[61905]: value = "task-1362612" [ 888.246222] env[61905]: _type = "Task" [ 888.246222] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.255940] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362612, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.570841] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362609, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.575239] env[61905]: DEBUG oslo_concurrency.lockutils [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.575505] env[61905]: DEBUG nova.compute.manager [req-f55390fc-0b72-4e24-8693-e8da9f1fb458 req-a379e35c-e709-46d2-8fdc-8f318c4d3897 service nova] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Received event network-vif-deleted-0f772b14-7fff-42ea-9082-d9759393537d {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.609121] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5263bdc8-8902-391c-00aa-04e4847319b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.609602] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.609850] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.610120] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.610276] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.610454] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.610711] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ace47f1c-88d7-46f8-91c5-f40fe9f90f3f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.620050] env[61905]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.620253] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.620986] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e443b6f6-271a-4fa3-af74-3c23c200fc15 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.625933] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 888.625933] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5289f7da-4601-647d-3cb8-33ef548ab0e4" [ 888.625933] env[61905]: _type = "Task" [ 888.625933] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.636320] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5289f7da-4601-647d-3cb8-33ef548ab0e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.755975] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362612, 'name': CreateVM_Task, 'duration_secs': 0.403517} completed successfully. 
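
The CreateVM_Task lifecycle above — "Invoking Folder.CreateVM_Task", a returned Task moref (task-1362612), "_poll_task ... progress is 0%", then "completed successfully" — is oslo.vmware's submit-then-poll pattern. A minimal sketch with placeholder morefs and a deliberately bare-bones config spec (the instance name and datastore are from the log; the real spec also carries CPU, memory and VIF devices from the flavor and network_info):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_folder = vim_util.get_moref('group-v12345', 'Folder')     # hypothetical
    res_pool = vim_util.get_moref('resgroup-1', 'ResourcePool')  # hypothetical

    # Minimal spec built via the suds factory the session exposes.
    factory = session.vim.client.factory
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.name = '27c3ed56-d24e-47d1-9c39-43b3b88a59b9'
    config_spec.guestId = 'otherGuest'
    config_spec.files = factory.create('ns0:VirtualMachineFileInfo')
    config_spec.files.vmPathName = '[datastore1]'

    # *_Task methods return a Task moref immediately; wait_for_task() polls
    # it every task_poll_interval seconds (the "progress is N%" records) and
    # returns the task info once the task reaches the 'success' state.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result  # moref of the newly created VM
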
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.758582] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.759636] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.760029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.760453] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 888.760807] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f613339-7160-4c9e-80ca-7e273d4a9362 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.765675] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 888.765675] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cdeb05-bdf5-4b73-f9dd-0d513d56e481" [ 888.765675] env[61905]: _type = "Task" [ 888.765675] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.778171] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cdeb05-bdf5-4b73-f9dd-0d513d56e481, 'name': SearchDatastore_Task, 'duration_secs': 0.009167} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.778605] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.779030] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.779434] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.787017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4639099-02cb-4144-b138-eac177ae40d2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.794017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f4ba67-f01a-4880-8519-652fec1a7a02 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.823236] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b1a0b5-809a-4b36-8138-0cecee512a82 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.830829] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4738aea6-df4e-48cc-a881-a044187d493f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.845948] env[61905]: DEBUG nova.compute.provider_tree [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.072518] env[61905]: DEBUG oslo_vmware.api [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362609, 'name': PowerOnVM_Task, 'duration_secs': 0.680465} completed successfully. 
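
The image-cache records above bracket every datastore lookup with "Acquiring" / "Acquired" / "Releasing lock" on the cached VMDK path. That is oslo.concurrency's inter-process file lock: concurrent spawns that use the same cached image (here both req-86a90e68 and req-8608805f against 4d166298-...) must serialize so the image is fetched and prepared only once. A minimal sketch; the lock name is flattened from the datastore path seen in the log, and lock_path is illustrative:

    from oslo_concurrency import lockutils

    # Nova's actual lock name embeds the full datastore path, e.g.
    # "[datastore1] devstack-image-cache_base/4d166298-.../4d166298-....vmdk";
    # a flat name is used here so the lock file has no path separators.
    LOCK_NAME = 'devstack-image-cache_base-4d166298-c700-4bc6-8f8f-67684a277053'

    # external=True backs the lock with a file under lock_path, serializing
    # across processes rather than just threads; each acquire/release is what
    # produces the lockutils DEBUG records in the log.
    with lockutils.lock(LOCK_NAME, external=True, lock_path='/tmp/nova-locks'):
        # fetch the image into the cache, or reuse it if already present
        pass
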
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.072766] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.072948] env[61905]: INFO nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Took 8.89 seconds to spawn the instance on the hypervisor. [ 889.073138] env[61905]: DEBUG nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.073921] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8816200-bd04-4c82-8695-7fcd76186f4b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.138777] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5289f7da-4601-647d-3cb8-33ef548ab0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009756} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.139595] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dd63c8a-24e7-42d7-8cd6-98cf09439498 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.155962] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 889.155962] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b2b96e-040d-ee04-0fdd-658c92368491" [ 889.155962] env[61905]: _type = "Task" [ 889.155962] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.164269] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b2b96e-040d-ee04-0fdd-658c92368491, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.350318] env[61905]: DEBUG nova.scheduler.client.report [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.594937] env[61905]: INFO nova.compute.manager [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Took 27.25 seconds to build instance. [ 889.666691] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b2b96e-040d-ee04-0fdd-658c92368491, 'name': SearchDatastore_Task, 'duration_secs': 0.023686} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.666955] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.667180] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.667499] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.667691] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.667908] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ab391b7-bd0f-441d-bc1e-7cb94ab86177 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.669882] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ff15dcc-8b2b-4620-b8fd-ab2fcc3910c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.677416] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 889.677416] env[61905]: value = "task-1362613" [ 889.677416] env[61905]: _type = "Task" [ 889.677416] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.681571] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.681829] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.682977] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde928a6-0d5f-4e88-924c-7ccd177cc4d7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.688878] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.692670] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 889.692670] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213cc4d-894a-fcdb-c281-f9be155a4f1a" [ 889.692670] env[61905]: _type = "Task" [ 889.692670] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.703944] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213cc4d-894a-fcdb-c281-f9be155a4f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.855993] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.856585] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 889.863020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.298s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.863020] env[61905]: INFO nova.compute.claims [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.097758] env[61905]: DEBUG oslo_concurrency.lockutils [None req-aae516f7-80c1-4c2e-a3e4-77230b2037fb tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.159s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.188238] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362613, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.205025] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213cc4d-894a-fcdb-c281-f9be155a4f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.021323} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.205025] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad9e2f95-4b31-478d-a915-9cf1400f5c96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.209818] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 890.209818] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eca8be-8a0c-953a-647a-04fb89c26bf1" [ 890.209818] env[61905]: _type = "Task" [ 890.209818] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.223702] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eca8be-8a0c-953a-647a-04fb89c26bf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.369963] env[61905]: DEBUG nova.compute.utils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.371418] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 890.371578] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 890.429806] env[61905]: DEBUG nova.policy [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b33029a3a5374cd9ae9b795f390dc39b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cb369144a2b44df9fbc5552ec50697a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.687650] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565365} completed successfully. 
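
The copy above (task-1362613) clones the cached image VMDK into the instance directory via the VirtualDiskManager. A minimal sketch; the source and destination paths are taken from the log, while the datacenter moref and session details are placeholders:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # hypothetical
    disk_mgr = session.vim.service_content.virtualDiskManager

    src = ('[datastore1] devstack-image-cache_base/'
           '4d166298-c700-4bc6-8f8f-67684a277053/'
           '4d166298-c700-4bc6-8f8f-67684a277053.vmdk')
    dst = ('[datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/'
           'ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk')

    # CopyVirtualDisk_Task is asynchronous; the 0%..77%..done progression in
    # the log is wait_for_task() polling the returned Task moref.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)
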
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.687907] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.688190] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.688395] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4499e8e-3d1b-4536-a767-b7858a8f18ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.694817] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 890.694817] env[61905]: value = "task-1362614" [ 890.694817] env[61905]: _type = "Task" [ 890.694817] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.701976] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.722799] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52eca8be-8a0c-953a-647a-04fb89c26bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.021619} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.725059] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.725511] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 27c3ed56-d24e-47d1-9c39-43b3b88a59b9/27c3ed56-d24e-47d1-9c39-43b3b88a59b9.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.725777] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5b38be9-c38c-495c-8847-86def80f8662 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.735114] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 890.735114] env[61905]: value = "task-1362615" [ 890.735114] env[61905]: _type = "Task" [ 890.735114] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.746976] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.747632] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Successfully created port: 55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.779407] env[61905]: INFO nova.compute.manager [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Rescuing [ 890.779630] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.779803] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.780012] env[61905]: DEBUG nova.network.neutron [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.879166] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.194238] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ddb729-8bee-4c48-bff5-5b3dd151c9b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.211834] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08873b94-833b-497a-9147-136d09364ddc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.215699] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065371} completed successfully. 
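
"Extending root virtual disk to 1048576" (task-1362614) grows the copied root disk to the flavor size; the unit is KB, so 1048576 KB is a 1 GiB root disk, consistent with the root_gb=1 m1.nano flavor shown in the hardware records below. A minimal sketch under the same placeholder session and datacenter assumptions as above:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # hypothetical
    disk_mgr = session.vim.service_content.virtualDiskManager

    root_vmdk = ('[datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/'
                 'ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk')

    # newCapacityKb is in KB: 1048576 KB == 1 GiB.
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                              name=root_vmdk, datacenter=dc_ref,
                              newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(task)
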
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.216060] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.217432] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf25cd6-fbf2-458d-b796-799d5a27ff5d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.250100] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4ae338-5b3e-4b18-8887-42e0ef88982c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.268026] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.268955] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99ff9be4-a271-4de0-80ff-af470b1f4dfc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.290420] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362615, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.292092] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472ebb6e-bd39-452e-be60-21d3463e8861 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.297322] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 891.297322] env[61905]: value = "task-1362616" [ 891.297322] env[61905]: _type = "Task" [ 891.297322] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.306015] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362616, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.314159] env[61905]: DEBUG nova.compute.provider_tree [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.662348] env[61905]: DEBUG nova.network.neutron [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [{"id": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "address": "fa:16:3e:db:4b:19", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b0c26b-f6", "ovs_interfaceid": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.755568] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547293} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.755930] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 27c3ed56-d24e-47d1-9c39-43b3b88a59b9/27c3ed56-d24e-47d1-9c39-43b3b88a59b9.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.756135] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.756404] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b02cba22-473f-41b8-947a-6c37e51ba374 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.763269] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 891.763269] env[61905]: value = "task-1362617" [ 891.763269] env[61905]: _type = "Task" [ 891.763269] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.771642] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.808023] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362616, 'name': ReconfigVM_Task, 'duration_secs': 0.309841} completed successfully. 
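
The "Reconfiguring VM instance ... to attach disk ... with type sparse" / "Reconfigured VM instance" pairs (task-1362616 here, task-1362619 for the second instance) wrap a ReconfigVM_Task whose spec adds a VirtualDisk device backed by the copied VMDK. A minimal sketch; the VM moref, device key, controller key and unit number are all illustrative, and the real spec is derived from the image's disk type and adapter:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical
    factory = session.vim.client.factory

    backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.diskMode = 'persistent'
    backing.fileName = ('[datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/'
                        'ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk')

    disk = factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.capacityInKB = 1048576
    disk.key = -100            # negative: vCenter assigns the real key
    disk.controllerKey = 100   # illustrative SCSI controller key
    disk.unitNumber = 0

    disk_change = factory.create('ns0:VirtualDeviceConfigSpec')
    disk_change.operation = 'add'
    disk_change.device = disk

    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [disk_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)
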
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.808704] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Reconfigured VM instance instance-00000050 to attach disk [datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.809113] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f97fd530-6164-49ac-9aae-5c1ae3f61b72 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.815538] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 891.815538] env[61905]: value = "task-1362618" [ 891.815538] env[61905]: _type = "Task" [ 891.815538] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.818996] env[61905]: DEBUG nova.scheduler.client.report [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.828016] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362618, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.889889] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 891.920124] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.921045] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 891.921297] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.921609] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.921677] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.921807] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.922015] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.922184] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.922353] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 
tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.922521] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.922687] env[61905]: DEBUG nova.virt.hardware [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.923761] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dff78c-12e3-4167-bcbc-67472c95b542 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.932478] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd77cf5e-747e-45d8-95a2-860f559ec248 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.166040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.274580] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076491} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.274948] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.275911] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1c4d98-c4fd-4293-a7af-473591419692 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.304420] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 27c3ed56-d24e-47d1-9c39-43b3b88a59b9/27c3ed56-d24e-47d1-9c39-43b3b88a59b9.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.306100] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69d8593e-f418-4e0e-bd3d-2f8e4bcfb7d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.322883] env[61905]: DEBUG nova.compute.manager [req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Received event network-vif-plugged-55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.323122] env[61905]: DEBUG oslo_concurrency.lockutils [req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.323341] env[61905]: DEBUG oslo_concurrency.lockutils [req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] Lock "7b0db0a2-c990-4160-9be8-018239425114-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.323509] env[61905]: DEBUG oslo_concurrency.lockutils [req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] Lock "7b0db0a2-c990-4160-9be8-018239425114-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.323678] env[61905]: DEBUG nova.compute.manager [req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] No waiting events found dispatching network-vif-plugged-55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 892.323843] env[61905]: WARNING nova.compute.manager 
[req-ac165716-4422-4abd-8293-ae925bd94867 req-b2b31068-c92b-49f4-a312-7101d1848b9b service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Received unexpected event network-vif-plugged-55782888-9c3d-4f40-852f-9cff30eb514b for instance with vm_state building and task_state spawning. [ 892.324977] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.325436] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 892.331464] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.596s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.331670] env[61905]: DEBUG nova.objects.instance [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 892.336628] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 892.336628] env[61905]: value = "task-1362619" [ 892.336628] env[61905]: _type = "Task" [ 892.336628] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.340128] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362618, 'name': Rename_Task, 'duration_secs': 0.138176} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.343598] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.343852] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd1d6793-e147-4ae7-8125-a8ed0cb2e6f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.350664] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362619, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.352143] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 892.352143] env[61905]: value = "task-1362620" [ 892.352143] env[61905]: _type = "Task" [ 892.352143] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.362108] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.417233] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Successfully updated port: 55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.700137] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.700429] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5392dda7-38bf-4bd4-b2d8-947b4ef62d1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.707144] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 892.707144] env[61905]: value = "task-1362621" [ 892.707144] env[61905]: _type = "Task" [ 892.707144] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.715437] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.839207] env[61905]: DEBUG nova.compute.utils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.840607] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 892.840770] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 892.852500] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362619, 'name': ReconfigVM_Task, 'duration_secs': 0.466623} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.852900] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 27c3ed56-d24e-47d1-9c39-43b3b88a59b9/27c3ed56-d24e-47d1-9c39-43b3b88a59b9.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.856333] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24943d82-fe32-46b8-9eb3-aed11d20f638 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.864331] env[61905]: DEBUG oslo_vmware.api [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362620, 'name': PowerOnVM_Task, 'duration_secs': 0.476149} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.865560] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.865788] env[61905]: INFO nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Took 5.41 seconds to spawn the instance on the hypervisor. [ 892.865987] env[61905]: DEBUG nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 892.866325] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 892.866325] env[61905]: value = "task-1362622" [ 892.866325] env[61905]: _type = "Task" [ 892.866325] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.867028] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33b7aa2-2e53-4a0d-a9b2-47240eba1a47 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.882670] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362622, 'name': Rename_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.909567] env[61905]: DEBUG nova.policy [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ca67104cdbd4ac9be9a57bb19846925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7af9072624d04f669e8183581e6ca50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 892.919864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.920093] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.920258] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.185111] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Successfully created port: be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.220940] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362621, 'name': PowerOffVM_Task, 'duration_secs': 0.275481} completed successfully. 
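
The "Policy check for network:attach_external_network failed" line above is oslo.policy rejecting a member-only token. A sketch of that kind of check using oslo.policy's public API; the 'role:admin' rule string is illustrative, not necessarily Nova's actual default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'], 'is_admin': False,
             'project_id': '7af9072624d04f669e8183581e6ca50a'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # False for this member-only context, so allocation proceeds with a
    # normal (non-external) port, as the log goes on to show.
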
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.221409] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.222670] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b36eafd-6f22-4ba9-974f-fe343f3f8fd1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.243264] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aecd54-5ab7-4ed1-a65e-d3ae49356c4f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.284023] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.284023] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59fbbf12-f381-44bf-939a-c22a111f5ac5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.291694] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 893.291694] env[61905]: value = "task-1362623" [ 893.291694] env[61905]: _type = "Task" [ 893.291694] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.301937] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 893.302211] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.302523] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.302622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.305469] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.305469] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-851aabea-2b7d-46fd-9b3f-7da70e7e8bad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.312332] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.312521] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Folder [datastore1] devstack-image-cache_base created. 
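
Note the sequence just above: a second PowerOffVM_Task is attempted and the driver simply records "VM already powered off". A tolerant power-off sketch, assuming oslo.vmware's fault mapping (InvalidPowerStateException is the exception raised for a redundant power transition):

    from oslo_vmware import exceptions as vexc

    def power_off(session, vm_ref):
        try:
            task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task',
                                          vm_ref)
            session.wait_for_task(task_ref)
        except vexc.InvalidPowerStateException:
            # vCenter refuses to power off a stopped VM; treat it as a
            # no-op, which is what the log line above reflects.
            pass
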
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.313251] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c12d2d42-5cf7-4d18-bdf1-d18619a4f6fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.318613] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 893.318613] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5285c40f-4f58-28e9-8d65-f0620e42a59d" [ 893.318613] env[61905]: _type = "Task" [ 893.318613] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.330316] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5285c40f-4f58-28e9-8d65-f0620e42a59d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.344019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e493e76a-876c-4ca0-823d-d464bfa8bd8b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.344019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.288s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.344019] env[61905]: DEBUG nova.objects.instance [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lazy-loading 'resources' on Instance uuid e1a22f3e-4557-44d2-8e34-cc75f573fe41 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.344700] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 893.379913] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362622, 'name': Rename_Task, 'duration_secs': 0.146247} completed successfully. 
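
The lines above show the image-cache check: take a lock named after the cached VMDK path, ensure devstack-image-cache_base exists, then run SearchDatastore_Task to see whether the image is already cached. A rough sketch of that flow (the helper name, hard-coded datastore path, and ds_browser argument are assumptions for illustration):

    from oslo_concurrency import lockutils
    from oslo_vmware import exceptions as vexc

    def image_cached(session, ds_browser, image_id):
        vmdk = ('[datastore1] devstack-image-cache_base/%(id)s/%(id)s.vmdk'
                % {'id': image_id})
        with lockutils.lock(vmdk):
            task_ref = session.invoke_api(
                session.vim, 'SearchDatastore_Task', ds_browser,
                datastorePath='[datastore1] devstack-image-cache_base/%s'
                              % image_id)
            try:
                session.wait_for_task(task_ref)
                return True
            except vexc.FileNotFoundException:
                return False  # cache miss: copy the image in first
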
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.381294] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.381674] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3375de29-a4d8-4b5f-978e-0060186ae9ad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.392104] env[61905]: INFO nova.compute.manager [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Took 22.76 seconds to build instance. [ 893.395831] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 893.395831] env[61905]: value = "task-1362624" [ 893.395831] env[61905]: _type = "Task" [ 893.395831] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.404822] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362624, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.469305] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.623859] env[61905]: DEBUG nova.network.neutron [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.829545] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5285c40f-4f58-28e9-8d65-f0620e42a59d, 'name': SearchDatastore_Task, 'duration_secs': 0.034348} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.830567] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44586d78-35ef-4c31-a93a-4d811355d442 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.837685] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 893.837685] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526cda91-b891-f841-81fc-9fb90fe9b7df" [ 893.837685] env[61905]: _type = "Task" [ 893.837685] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.846368] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526cda91-b891-f841-81fc-9fb90fe9b7df, 'name': SearchDatastore_Task} progress is 0%. 
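
The instance_info_cache entry above is a plain list of VIF dicts. A small helper to pull the MAC and fixed IPs out of exactly that structure (key names taken from the entry itself):

    def fixed_ips(network_info):
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        yield vif['address'], ip['address']

    # For the cache entry above this yields
    # ('fa:16:3e:6e:f1:7a', '192.168.128.11').
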
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.895340] env[61905]: DEBUG oslo_concurrency.lockutils [None req-86a90e68-2113-4c83-b29d-6a6f9b1d6462 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.268s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.906187] env[61905]: DEBUG oslo_vmware.api [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362624, 'name': PowerOnVM_Task, 'duration_secs': 0.472701} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.908778] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.909025] env[61905]: INFO nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Took 9.31 seconds to spawn the instance on the hypervisor. [ 893.909210] env[61905]: DEBUG nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.910378] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6629dd-b75e-4caf-a8cb-457138314168 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.963225] env[61905]: INFO nova.compute.manager [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Rebuilding instance [ 894.008382] env[61905]: DEBUG nova.compute.manager [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 894.009495] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6f746a-c046-496a-aa91-f99934d3fdde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.127708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.127990] env[61905]: DEBUG nova.compute.manager [None 
req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Instance network_info: |[{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 894.128454] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:f1:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55782888-9c3d-4f40-852f-9cff30eb514b', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.135786] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Creating folder: Project (0cb369144a2b44df9fbc5552ec50697a). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.138397] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9be1ff9-f5db-462f-8574-0c19aee9656c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.150770] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Created folder: Project (0cb369144a2b44df9fbc5552ec50697a) in parent group-v289968. [ 894.151114] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Creating folder: Instances. Parent ref: group-v290060. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 894.151483] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f29d12e6-a2bf-41a5-a6df-47da7f45e461 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.154620] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8499fc-42c6-4663-9761-b6bf8cf225e5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.164362] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4717ef-0dc5-4bc0-a99f-c418cf53316d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.169733] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Created folder: Instances in parent group-v290060. [ 894.170147] env[61905]: DEBUG oslo.service.loopingcall [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.170893] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.171141] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0259145-99bd-4b00-82bd-1c394e93dca6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.211124] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29663eaa-5b84-4ceb-bfc7-e02275088747 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.214928] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.214928] env[61905]: value = "task-1362627" [ 894.214928] env[61905]: _type = "Task" [ 894.214928] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.220852] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e025a979-f4a1-4de1-9370-a9e86f778468 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.226876] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362627, 'name': CreateVM_Task} progress is 0%. 
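
The two "Created folder" lines above build the standard layout: a "Project (<tenant>)" folder under the configured parent, then an "Instances" folder inside it. A sketch of those CreateFolder calls (a production helper would also tolerate DuplicateName faults when the folder already exists):

    def build_instance_folders(session, parent_ref, project_id):
        project_ref = session.invoke_api(
            session.vim, 'CreateFolder', parent_ref,
            name='Project (%s)' % project_id)
        return session.invoke_api(
            session.vim, 'CreateFolder', project_ref, name='Instances')
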
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.236243] env[61905]: DEBUG nova.compute.provider_tree [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.330079] env[61905]: DEBUG nova.compute.manager [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Received event network-changed-55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 894.330372] env[61905]: DEBUG nova.compute.manager [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Refreshing instance network info cache due to event network-changed-55782888-9c3d-4f40-852f-9cff30eb514b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 894.330502] env[61905]: DEBUG oslo_concurrency.lockutils [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] Acquiring lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.330645] env[61905]: DEBUG oslo_concurrency.lockutils [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] Acquired lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.330805] env[61905]: DEBUG nova.network.neutron [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Refreshing network info cache for port 55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.349336] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526cda91-b891-f841-81fc-9fb90fe9b7df, 'name': SearchDatastore_Task, 'duration_secs': 0.019616} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.349613] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.349868] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk. {{(pid=61905) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 894.350140] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12639639-7cf5-4ddc-b105-fbeaeb303657 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.357233] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 894.359104] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 894.359104] env[61905]: value = "task-1362628" [ 894.359104] env[61905]: _type = "Task" [ 894.359104] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.367528] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362628, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.383720] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.383966] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.384141] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.384332] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.384483] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.384632] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.384838] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.384998] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.385190] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Got 1 
possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.385355] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.385527] env[61905]: DEBUG nova.virt.hardware [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.386352] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75b80e1-ddbb-48b1-a346-c18ac30e444a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.394268] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2083242d-6dba-4487-b328-7a1a95e9b7a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.428223] env[61905]: INFO nova.compute.manager [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Took 25.11 seconds to build instance. [ 894.521967] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.521967] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c461aceb-f582-4c86-bfb4-b8f5e6ebb301 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.528653] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 894.528653] env[61905]: value = "task-1362629" [ 894.528653] env[61905]: _type = "Task" [ 894.528653] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.538670] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362629, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.726215] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362627, 'name': CreateVM_Task, 'duration_secs': 0.485411} completed successfully. 
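
The topology lines above reduce to a small enumeration: list every (sockets, cores, threads) factorization of the vCPU count that fits the per-dimension limits, then sort by preference. A sketch of just the enumeration step:

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1, 65536, 65536, 65536)))
    # [(1, 1, 1)] -- the single possible topology reported above
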
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.726215] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.726962] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.727285] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.727741] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 894.728144] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff8e37b5-2af8-4f3d-80dd-23db0f059537 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.733584] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 894.733584] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524463bb-c864-464a-114c-190608e621dc" [ 894.733584] env[61905]: _type = "Task" [ 894.733584] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.742030] env[61905]: DEBUG nova.scheduler.client.report [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.748756] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524463bb-c864-464a-114c-190608e621dc, 'name': SearchDatastore_Task} progress is 0%. 
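
The inventory record above fixes the provider's schedulable capacity: placement derives it as (total - reserved) * allocation_ratio per resource class. Worked out for the values logged:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
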
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.846614] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Successfully updated port: be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.871902] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362628, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.930285] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8608805f-f3cb-42d3-8a3d-19ebbeb01aee tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.641s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.037663] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362629, 'name': PowerOffVM_Task, 'duration_secs': 0.302561} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.037927] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.038331] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 895.039085] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c773d8-5042-4ae8-a5fa-d52e973fbfb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.045476] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.045699] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f11dd3d-99e7-44e1-ba88-8d75a83cbece {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.069634] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Unregistered the VM {{(pid=61905) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.070812] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.070812] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Deleting the datastore file [datastore1] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.070812] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc82035b-3041-420c-948b-cddcef1b0c31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.076971] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 895.076971] env[61905]: value = "task-1362631" [ 895.076971] env[61905]: _type = "Task" [ 895.076971] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.087167] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362631, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.115101] env[61905]: DEBUG nova.network.neutron [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updated VIF entry in instance network info cache for port 55782888-9c3d-4f40-852f-9cff30eb514b. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.115573] env[61905]: DEBUG nova.network.neutron [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.244228] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524463bb-c864-464a-114c-190608e621dc, 'name': SearchDatastore_Task, 'duration_secs': 0.056228} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.244488] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.244721] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.244952] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.245114] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.245293] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.245544] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c4718de-e286-4b72-9ce1-6b1b6653dc63 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.252028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.254112] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.212s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.254338] env[61905]: DEBUG nova.objects.instance [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lazy-loading 'resources' on Instance uuid 9a385d72-ba5d-48e0-b71f-d37d4e63c403 {{(pid=61905) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 895.260123] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.260123] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.260826] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2f846c-df6d-4d31-bcb2-8decc3fb9434 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.267347] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 895.267347] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52127439-7553-1dc1-31ee-cc7da5f5a350" [ 895.267347] env[61905]: _type = "Task" [ 895.267347] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.276832] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52127439-7553-1dc1-31ee-cc7da5f5a350, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.281355] env[61905]: INFO nova.scheduler.client.report [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted allocations for instance e1a22f3e-4557-44d2-8e34-cc75f573fe41 [ 895.350033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.350033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.350033] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.369735] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571915} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.370013] env[61905]: INFO nova.virt.vmwareapi.ds_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk. 
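The CopyVirtualDisk_Task entries above follow oslo.vmware's invoke-then-poll pattern: Nova issues the SOAP call through invoke_api (the "Invoking VirtualDiskManager.CopyVirtualDisk_Task" line), then wait_for_task polls the returned task object, logging "Task: {...} progress is N%" on each poll and "completed successfully" with duration_secs once vCenter reports success. A minimal sketch of that pattern, assuming an authenticated oslo.vmware session; the endpoint, credentials, and datastore paths below are illustrative placeholders, not values from this log:

    from oslo_vmware import api

    # Establish a vCenter session; wait_for_task() polls the task at
    # task_poll_interval seconds until it leaves the 'running' state.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',       # placeholder endpoint/creds
        api_retry_count=10, task_poll_interval=0.5)

    # Issue the SOAP call -- this is the step the "Invoking
    # VirtualDiskManager.CopyVirtualDisk_Task" entries correspond to.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',  # placeholder
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')        # placeholder

    # Each poll logs "Task: {...} progress is N%"; the final poll logs the
    # completion line with duration_secs, or raises if the task errored.
    session.wait_for_task(task)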
[ 895.370828] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3ae926-30fb-4472-b3c9-d4debce7f506 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.397569] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.397831] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4632ff0-39e5-46bf-97bf-211709120c0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.416322] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 895.416322] env[61905]: value = "task-1362632" [ 895.416322] env[61905]: _type = "Task" [ 895.416322] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.425634] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362632, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.587467] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114298} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.587729] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.587913] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.588101] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.618582] env[61905]: DEBUG oslo_concurrency.lockutils [req-6990b863-a768-4904-a0a2-2b95473b021e req-55ae1b1b-3e3b-4f47-985a-b08912584d4f service nova] Releasing lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.707370] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.707649] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.707903] env[61905]: INFO nova.compute.manager [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Rebooting instance [ 895.778552] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52127439-7553-1dc1-31ee-cc7da5f5a350, 'name': SearchDatastore_Task, 'duration_secs': 0.010987} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.782446] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-401d4996-d019-4cae-9d7e-c840f37ab251 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.790324] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 895.790324] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e5d7a1-470c-ca78-61df-78883e1dc80c" [ 895.790324] env[61905]: _type = "Task" [ 895.790324] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.791560] env[61905]: DEBUG oslo_concurrency.lockutils [None req-20cbd3c2-e7f2-45fc-93c4-d75f8a6d3f73 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "e1a22f3e-4557-44d2-8e34-cc75f573fe41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.416s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.803471] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e5d7a1-470c-ca78-61df-78883e1dc80c, 'name': SearchDatastore_Task, 'duration_secs': 0.009504} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.803866] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.804027] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 7b0db0a2-c990-4160-9be8-018239425114/7b0db0a2-c990-4160-9be8-018239425114.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.804338] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-919e4943-ccd3-440a-8d01-9592b6e4270f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.813225] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 895.813225] env[61905]: value = "task-1362633" [ 895.813225] env[61905]: _type = "Task" [ 895.813225] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.822153] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362633, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.881501] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.928225] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362632, 'name': ReconfigVM_Task, 'duration_secs': 0.455594} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.928577] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfigured VM instance instance-0000004e to attach disk [datastore1] e9e00459-e685-431b-b194-cf426c7a743e/4d166298-c700-4bc6-8f8f-67684a277053-rescue.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.929508] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a82ffc-a691-463b-909e-bc20b0661519 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.961905] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99d0774f-2878-4713-9daa-952f37d48803 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.983446] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 895.983446] env[61905]: value = "task-1362634" [ 895.983446] env[61905]: _type = "Task" [ 895.983446] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.996577] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362634, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.070726] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067560a1-5a19-431b-a281-e26188724f59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.078608] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a57895-14bd-47ee-8752-a1c934138af8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.115257] env[61905]: DEBUG nova.network.neutron [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Updating instance_info_cache with network_info: [{"id": "be53f19f-74d1-4618-a4d5-92af42f53641", "address": "fa:16:3e:e7:81:9b", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe53f19f-74", "ovs_interfaceid": "be53f19f-74d1-4618-a4d5-92af42f53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.117986] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d227f135-6291-431f-9dd6-700200e5af81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.126786] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49473fb6-f253-4eec-b772-fa5abfc3d378 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.142280] env[61905]: DEBUG nova.compute.provider_tree [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.235225] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.235538] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquired lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.235786] env[61905]: DEBUG nova.network.neutron [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.323708] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362633, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.356933] env[61905]: DEBUG nova.compute.manager [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Received event network-vif-plugged-be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.357240] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Acquiring lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.357534] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.357869] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.357953] env[61905]: DEBUG nova.compute.manager [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] No waiting events found dispatching network-vif-plugged-be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 896.358237] env[61905]: WARNING nova.compute.manager [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Received unexpected event network-vif-plugged-be53f19f-74d1-4618-a4d5-92af42f53641 for instance with vm_state building and task_state spawning. 
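The lock triplets threaded through these entries ("Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns", and 'Lock ... "released" ... :: held Ns') are emitted by oslo.concurrency: the decorator's inner() wrapper in lockutils.py logs before acquisition, after the wait, and on release, which is how the waited/held timings above (e.g. waited 19.212s on "compute_resources") are measured, while the bare Acquiring/Acquired/Releasing lines for the refresh_cache-<uuid> and image-cache locks come from the lock() context manager. A minimal sketch of both forms, assuming oslo.concurrency; the lock names mirror the log but the function bodies are placeholders:

    from oslo_concurrency import lockutils

    # Decorator form: produces the 'acquired by "..." :: waited Ns' /
    # '"released" ... :: held Ns' lines, logged from inner() in lockutils.py.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        print('updating usage for %s' % instance)   # placeholder body

    # Context-manager form: produces the plain "Acquiring lock" /
    # "Acquired lock" / "Releasing lock" entries seen for the
    # refresh_cache-<uuid> locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            print('rebuilding network info for %s' % instance_uuid)  # placeholder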
[ 896.358364] env[61905]: DEBUG nova.compute.manager [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Received event network-changed-be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.358565] env[61905]: DEBUG nova.compute.manager [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Refreshing instance network info cache due to event network-changed-be53f19f-74d1-4618-a4d5-92af42f53641. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 896.358827] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Acquiring lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.496578] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362634, 'name': ReconfigVM_Task, 'duration_secs': 0.196977} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.496860] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.497169] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3004d45-e6c9-48fe-b74a-4c8598fc64e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.503451] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 896.503451] env[61905]: value = "task-1362635" [ 896.503451] env[61905]: _type = "Task" [ 896.503451] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.511333] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362635, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.624209] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.624798] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Instance network_info: |[{"id": "be53f19f-74d1-4618-a4d5-92af42f53641", "address": "fa:16:3e:e7:81:9b", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe53f19f-74", "ovs_interfaceid": "be53f19f-74d1-4618-a4d5-92af42f53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 896.624961] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Acquired lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.625134] env[61905]: DEBUG nova.network.neutron [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Refreshing network info cache for port be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.626527] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:81:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be53f19f-74d1-4618-a4d5-92af42f53641', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.634649] env[61905]: DEBUG oslo.service.loopingcall [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.637794] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.638380] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49dba3cb-1716-4d94-91ff-b3270f573d72 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.657712] env[61905]: DEBUG nova.scheduler.client.report [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.666304] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.666304] env[61905]: value = "task-1362636" [ 896.666304] env[61905]: _type = "Task" [ 896.666304] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.668640] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.668934] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.669128] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.669361] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Flavor pref 0:0:0 {{(pid=61905) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.669564] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.669745] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.669976] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.670928] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 896.670928] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.670928] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.670928] env[61905]: DEBUG nova.virt.hardware [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.672279] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8820f328-192d-434e-a668-36eb71c24795 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.691684] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0553dc-6f45-49af-89c8-baf2f995a272 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.695846] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362636, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.707310] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.713985] env[61905]: DEBUG oslo.service.loopingcall [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.714300] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.714644] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3a4c016-e822-484f-b510-3eba8bbc1c32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.735029] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.735029] env[61905]: value = "task-1362637" [ 896.735029] env[61905]: _type = "Task" [ 896.735029] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.744415] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362637, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.825300] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525799} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.825659] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 7b0db0a2-c990-4160-9be8-018239425114/7b0db0a2-c990-4160-9be8-018239425114.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.825842] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.826246] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97e2b8db-ea73-43d5-ad0e-87f86ad040ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.835156] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 896.835156] env[61905]: value = "task-1362638" [ 896.835156] env[61905]: _type = "Task" [ 896.835156] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.843393] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.901992] env[61905]: DEBUG nova.network.neutron [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Updated VIF entry in instance network info cache for port be53f19f-74d1-4618-a4d5-92af42f53641. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.902416] env[61905]: DEBUG nova.network.neutron [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Updating instance_info_cache with network_info: [{"id": "be53f19f-74d1-4618-a4d5-92af42f53641", "address": "fa:16:3e:e7:81:9b", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe53f19f-74", "ovs_interfaceid": "be53f19f-74d1-4618-a4d5-92af42f53641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.012843] env[61905]: DEBUG oslo_vmware.api [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362635, 'name': PowerOnVM_Task, 'duration_secs': 0.506111} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.013394] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.015966] env[61905]: DEBUG nova.compute.manager [None req-b1fb70e3-5164-40ba-aeae-84593fb8a546 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.016865] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd76568-55ee-49c9-b5b8-5942c61afd14 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.054801] env[61905]: DEBUG nova.network.neutron [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Updating instance_info_cache with network_info: [{"id": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "address": "fa:16:3e:2d:35:1e", "network": {"id": "f09e2fca-89a4-4442-b047-5eaf65657fea", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1606164832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c6a827d04af4d979146be16cd3517db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2766fd3-66", "ovs_interfaceid": "c2766fd3-662b-45c8-b9c8-765f49bd1701", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.163060] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.909s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.166605] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.586s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.166605] env[61905]: DEBUG 
nova.objects.instance [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lazy-loading 'resources' on Instance uuid 111d10e8-7e36-48b6-be45-2275c36fbee4 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.181565] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362636, 'name': CreateVM_Task, 'duration_secs': 0.382503} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.181798] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.182967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.182967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.182967] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 897.183825] env[61905]: INFO nova.scheduler.client.report [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Deleted allocations for instance 9a385d72-ba5d-48e0-b71f-d37d4e63c403 [ 897.184701] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f227eada-3fcb-4fbf-ad1b-d4ffd12361d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.191446] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 897.191446] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524e4401-d1d8-5e5c-207e-2ff5999b3e00" [ 897.191446] env[61905]: _type = "Task" [ 897.191446] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.201136] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524e4401-d1d8-5e5c-207e-2ff5999b3e00, 'name': SearchDatastore_Task, 'duration_secs': 0.008984} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.201417] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.201643] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.201877] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.202105] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.202221] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.202479] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ae15133-4c95-42ce-8bf4-520f7a0c81ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.210540] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.210729] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.211746] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e537a79c-b0bc-402b-9285-c4e772bac7e5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.217055] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 897.217055] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522ec5d6-d62a-9199-1c12-2c148475f031" [ 897.217055] env[61905]: _type = "Task" [ 897.217055] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.225064] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522ec5d6-d62a-9199-1c12-2c148475f031, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.244012] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362637, 'name': CreateVM_Task, 'duration_secs': 0.426183} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.244199] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.244670] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.245122] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.245211] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 897.245432] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e92909-8486-4edf-998d-843bc8aa3cd2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.250235] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 897.250235] env[61905]: value = 
"session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e0d7b-8751-d9ff-8638-e90524a60976" [ 897.250235] env[61905]: _type = "Task" [ 897.250235] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.258931] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e0d7b-8751-d9ff-8638-e90524a60976, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.345024] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099338} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.345311] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.346108] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29081bd-49b2-4f02-94f2-9d9c9a22f1ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.370217] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 7b0db0a2-c990-4160-9be8-018239425114/7b0db0a2-c990-4160-9be8-018239425114.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.370573] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d43fb7d-dbe0-4c29-abde-a4a96647f319 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.392069] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 897.392069] env[61905]: value = "task-1362639" [ 897.392069] env[61905]: _type = "Task" [ 897.392069] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.404757] env[61905]: DEBUG oslo_concurrency.lockutils [req-122836fe-ac31-4c6f-8ace-57541918a265 req-9e17ccd8-fa25-451e-ad49-a0a0a4df06d1 service nova] Releasing lock "refresh_cache-22b6d87c-08c5-492c-a963-f7ad6ef5db5b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.415624] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362639, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.557517] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Releasing lock "refresh_cache-27c3ed56-d24e-47d1-9c39-43b3b88a59b9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.559478] env[61905]: DEBUG nova.compute.manager [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.560375] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a27f95e-be97-47a3-b670-1396e863cc11 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.695288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-dc5267b5-b187-47e4-9338-84755b911f23 tempest-SecurityGroupsTestJSON-908414919 tempest-SecurityGroupsTestJSON-908414919-project-member] Lock "9a385d72-ba5d-48e0-b71f-d37d4e63c403" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.805s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.727078] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522ec5d6-d62a-9199-1c12-2c148475f031, 'name': SearchDatastore_Task, 'duration_secs': 0.011014} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.730254] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e526366-c636-4000-99e0-ed20654bf37a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.735350] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 897.735350] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283d5de-95c0-f3ea-4737-e84c61031b9c" [ 897.735350] env[61905]: _type = "Task" [ 897.735350] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.744260] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283d5de-95c0-f3ea-4737-e84c61031b9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.760889] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529e0d7b-8751-d9ff-8638-e90524a60976, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.763553] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.763891] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.764172] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.764327] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.764508] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.764926] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42e53c71-cead-4fba-a43e-830ab00cf69f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.772450] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.772629] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.773330] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cacfe9ef-3235-48f4-b0c7-d64ea98495e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.780477] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 897.780477] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52dbaa5a-dc67-93ab-3611-ab3ff8b8b7d5" [ 897.780477] env[61905]: _type = "Task" [ 897.780477] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.787849] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52dbaa5a-dc67-93ab-3611-ab3ff8b8b7d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.806667] env[61905]: INFO nova.compute.manager [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Unrescuing [ 897.807016] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.807299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquired lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.807574] env[61905]: DEBUG nova.network.neutron [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 897.901172] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362639, 'name': ReconfigVM_Task, 'duration_secs': 0.320563} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.903541] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 7b0db0a2-c990-4160-9be8-018239425114/7b0db0a2-c990-4160-9be8-018239425114.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 897.904368] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37156ac2-be39-4f9b-a2cc-096747f3f9b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.910596] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 897.910596] env[61905]: value = "task-1362640" [ 897.910596] env[61905]: _type = "Task" [ 897.910596] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.914709] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1285b9d8-06d3-4f2f-b595-b7a668ed878b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.921830] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362640, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.924267] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4957c453-00db-425b-baec-a7b52d018ae8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.955512] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7822ff8-0076-4a3d-a99f-7e6d4064dd76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.963019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435e8379-5a31-4e95-94fc-dbab4f6a146a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.978487] env[61905]: DEBUG nova.compute.provider_tree [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.247268] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283d5de-95c0-f3ea-4737-e84c61031b9c, 'name': SearchDatastore_Task, 'duration_secs': 0.008883} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.247478] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.247745] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 22b6d87c-08c5-492c-a963-f7ad6ef5db5b/22b6d87c-08c5-492c-a963-f7ad6ef5db5b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.248047] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3341645-658f-4758-bc4c-26783a15b026 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.254882] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 898.254882] env[61905]: value = "task-1362641" [ 898.254882] env[61905]: _type = "Task" [ 898.254882] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.262170] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.290346] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52dbaa5a-dc67-93ab-3611-ab3ff8b8b7d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009873} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.291130] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80f82f4c-36ff-4f7b-8721-56937eac5ba4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.296104] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 898.296104] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529c2b23-0c26-72e3-2614-419aa35f63b9" [ 898.296104] env[61905]: _type = "Task" [ 898.296104] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.303633] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529c2b23-0c26-72e3-2614-419aa35f63b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.420723] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362640, 'name': Rename_Task, 'duration_secs': 0.147694} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.421667] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.421667] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8df2359-81c9-4eb5-af07-3e7d8170a0cb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.427984] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 898.427984] env[61905]: value = "task-1362642" [ 898.427984] env[61905]: _type = "Task" [ 898.427984] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.435966] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362642, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.481981] env[61905]: DEBUG nova.scheduler.client.report [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.577944] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f588c9-155b-476e-8c18-c7117ec5d7a4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.586606] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Doing hard reboot of VM {{(pid=61905) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 898.586791] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-aefa18f9-c158-45f7-a049-87ea64d01afc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.592812] env[61905]: DEBUG oslo_vmware.api [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 898.592812] env[61905]: value = "task-1362643" [ 898.592812] env[61905]: _type = "Task" [ 898.592812] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.600910] env[61905]: DEBUG oslo_vmware.api [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362643, 'name': ResetVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.602037] env[61905]: DEBUG nova.network.neutron [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [{"id": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "address": "fa:16:3e:db:4b:19", "network": {"id": "b90ef521-dcde-44ad-a904-d46b0a8846bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-778391900-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "40c35a175eec4445817a2860c1f5770d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b0c26b-f6", "ovs_interfaceid": "a4b0c26b-f66a-462b-8be3-1f8271de80e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.767082] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362641, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.811568] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529c2b23-0c26-72e3-2614-419aa35f63b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01563} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.812283] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.812806] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.813188] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-accec7cf-1b04-4bbf-a031-26dbf1878090 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.822406] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 898.822406] env[61905]: value = "task-1362644" [ 898.822406] env[61905]: _type = "Task" [ 898.822406] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.832190] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.940251] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362642, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.988824] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.991244] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.139s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.991486] env[61905]: DEBUG nova.objects.instance [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lazy-loading 'resources' on Instance uuid 74f94a46-63e4-44e0-9142-7e7d46cd31a7 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.019318] env[61905]: INFO nova.scheduler.client.report [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted allocations for instance 111d10e8-7e36-48b6-be45-2275c36fbee4 [ 899.103910] env[61905]: DEBUG oslo_vmware.api [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362643, 'name': ResetVM_Task, 'duration_secs': 0.105186} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.104477] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Releasing lock "refresh_cache-e9e00459-e685-431b-b194-cf426c7a743e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.105148] env[61905]: DEBUG nova.objects.instance [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lazy-loading 'flavor' on Instance uuid e9e00459-e685-431b-b194-cf426c7a743e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.106566] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Did hard reboot of VM {{(pid=61905) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 899.106763] env[61905]: DEBUG nova.compute.manager [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.107773] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebc6478-ddd7-4dd7-89e2-1fa29b2a7f0f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.267471] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716106} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.267895] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 22b6d87c-08c5-492c-a963-f7ad6ef5db5b/22b6d87c-08c5-492c-a963-f7ad6ef5db5b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.268210] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.268546] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-238e1bd1-24c6-4083-9e45-c75e90faae6c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.276076] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 899.276076] env[61905]: value = "task-1362645" [ 899.276076] env[61905]: _type = "Task" [ 899.276076] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.286461] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362645, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.337348] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362644, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.438863] env[61905]: DEBUG oslo_vmware.api [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362642, 'name': PowerOnVM_Task, 'duration_secs': 0.776309} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.439163] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.439376] env[61905]: INFO nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Took 7.55 seconds to spawn the instance on the hypervisor. 
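The spawn sequence recorded above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats one pattern: invoke a vSphere *_Task method through the oslo.vmware session, then block in wait_for_task(), whose polling produces the "progress is N%" and "completed successfully" records. A minimal sketch of that pattern, assuming an established VMwareAPISession and hypothetical connection and path values (the real ones come from nova.conf and the instance spec):

    from oslo_vmware import api

    # Hypothetical endpoint and credentials for illustration; the
    # deployment reads these from nova.conf's [vmware] section.
    session = api.VMwareAPISession(
        host='vc1.example.test', port=443,
        server_username='stack', server_password='secret',
        api_retry_count=10, task_poll_interval=0.5)

    def copy_and_extend(src_vmdk, dst_vmdk, dc_ref, size_kb):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # CopyVirtualDisk_Task returns a Task reference; wait_for_task()
        # polls it (the "progress is N%" lines above) and raises if the
        # task finishes in an error state.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src_vmdk, sourceDatacenter=dc_ref,
            destName=dst_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)
        # Same invoke-then-wait pattern to grow the copied root disk to
        # the flavor size, as in the ExtendVirtualDisk_Task records.
        task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=dst_vmdk, datacenter=dc_ref,
            newCapacityKb=size_kb, eagerZero=False)
        session.wait_for_task(task)
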
[ 899.439559] env[61905]: DEBUG nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.440354] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd16174-1a2e-4cba-8589-1ef5810d676e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.527529] env[61905]: DEBUG oslo_concurrency.lockutils [None req-54d0d3e0-9409-4f67-b00c-f048dd0f283c tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "111d10e8-7e36-48b6-be45-2275c36fbee4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.572s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.617134] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5d31ae-2d80-4178-9c9e-7ffd101416e5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.623718] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8a5f96e5-abf8-4e8f-ae2a-307788dbb67e tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.916s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.647685] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.649268] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-814e5c18-e231-4f1c-854e-53c8c7a43c3a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.657041] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 899.657041] env[61905]: value = "task-1362646" [ 899.657041] env[61905]: _type = "Task" [ 899.657041] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.667942] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362646, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.758331] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7dc3aa-1ee7-401a-8115-4afe980370df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.766351] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663c7c9b-a2bc-4274-a3f0-25c3649c8c49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.805897] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344a6b5f-0854-41fb-aad0-ea1bf927be2b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.817352] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dd041a-1731-480d-b1f0-0c3a0587ebb9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.822497] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362645, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078599} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.822818] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.823942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc657513-41b2-4ac0-8c3e-7cd064c83c4d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.834260] env[61905]: DEBUG nova.compute.provider_tree [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.843966] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362644, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572055} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.853516] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.853763] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.862390] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 22b6d87c-08c5-492c-a963-f7ad6ef5db5b/22b6d87c-08c5-492c-a963-f7ad6ef5db5b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.863504] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f5bf1cc-b6bb-485a-8e0a-83f9e5b8de19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.865613] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cf14575-df61-41d4-a597-2e39ddc80d64 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.886100] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 899.886100] env[61905]: value = "task-1362647" [ 899.886100] env[61905]: _type = "Task" [ 899.886100] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.887454] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 899.887454] env[61905]: value = "task-1362648" [ 899.887454] env[61905]: _type = "Task" [ 899.887454] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.898480] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362647, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.901586] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362648, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.960831] env[61905]: INFO nova.compute.manager [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Took 27.44 seconds to build instance. [ 900.167253] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362646, 'name': PowerOffVM_Task, 'duration_secs': 0.3533} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.167551] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.172928] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 900.173237] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ad8dc60-2e72-4ba2-b291-d2b730a61bb6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.191523] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 900.191523] env[61905]: value = "task-1362649" [ 900.191523] env[61905]: _type = "Task" [ 900.191523] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.201685] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362649, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.343099] env[61905]: DEBUG nova.scheduler.client.report [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.400737] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362647, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122233} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.404149] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.404484] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.405301] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb128090-43cd-4084-a2f8-c6a6ffc83b98 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.425761] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.426144] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fd799f8-bbdf-49eb-8e73-dbddd817585b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.447975] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.448357] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.448532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.448843] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.449113] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.450881] env[61905]: DEBUG oslo_vmware.api [None 
req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 900.450881] env[61905]: value = "task-1362650" [ 900.450881] env[61905]: _type = "Task" [ 900.450881] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.451351] env[61905]: INFO nova.compute.manager [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Terminating instance [ 900.453375] env[61905]: DEBUG nova.compute.manager [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 900.453568] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.457443] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c07a46-5a9d-4719-a4ef-e2bb82824e94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.465894] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a934a2d8-8d78-488b-ac64-8e7962a295d5 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.976s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.466136] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362650, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.468786] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.469263] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33d2f070-d52e-45d6-b13b-d679823ba797 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.476572] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 900.476572] env[61905]: value = "task-1362651" [ 900.476572] env[61905]: _type = "Task" [ 900.476572] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.485543] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362651, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.704024] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362649, 'name': ReconfigVM_Task, 'duration_secs': 0.340792} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.704024] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 900.704024] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.704024] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8245b65c-51ab-4b6e-8202-c0ff7b25cc70 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.708384] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 900.708384] env[61905]: value = "task-1362652" [ 900.708384] env[61905]: _type = "Task" [ 900.708384] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.717845] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362652, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.848018] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.850547] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.499s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.850809] env[61905]: DEBUG nova.objects.instance [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lazy-loading 'resources' on Instance uuid 4bb7a2df-b472-4f6d-8a01-a55d0b86efda {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.881827] env[61905]: INFO nova.scheduler.client.report [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Deleted allocations for instance 74f94a46-63e4-44e0-9142-7e7d46cd31a7 [ 900.903127] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362648, 'name': ReconfigVM_Task, 'duration_secs': 0.803749} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.903447] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 22b6d87c-08c5-492c-a963-f7ad6ef5db5b/22b6d87c-08c5-492c-a963-f7ad6ef5db5b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.904200] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7395ab5f-e911-41b6-b311-2eccb33951cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.912811] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 900.912811] env[61905]: value = "task-1362653" [ 900.912811] env[61905]: _type = "Task" [ 900.912811] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.923459] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362653, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.963702] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362650, 'name': ReconfigVM_Task, 'duration_secs': 0.342858} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.963772] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Reconfigured VM instance instance-00000050 to attach disk [datastore2] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0/ebf7849c-716f-4b4c-bb9c-42c090d0b3c0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.964418] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5e36676-d739-47fc-bd6a-e5ce92d00ef5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.972691] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 900.972691] env[61905]: value = "task-1362654" [ 900.972691] env[61905]: _type = "Task" [ 900.972691] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.992100] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362654, 'name': Rename_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.996022] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362651, 'name': PowerOffVM_Task, 'duration_secs': 0.189361} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.996428] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.996680] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.996987] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1c4bb77-b00a-4d87-8e99-e03ae2dddbcc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.074858] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.075320] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.075536] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Deleting the datastore file [datastore1] 27c3ed56-d24e-47d1-9c39-43b3b88a59b9 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.075847] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20064280-e905-4f85-b915-53710552a71a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.085996] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for the task: (returnval){ [ 901.085996] env[61905]: value = "task-1362656" [ 901.085996] env[61905]: _type = "Task" [ 901.085996] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.094586] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362656, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.221017] env[61905]: DEBUG oslo_vmware.api [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362652, 'name': PowerOnVM_Task, 'duration_secs': 0.392538} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.221416] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.221651] env[61905]: DEBUG nova.compute.manager [None req-7da2b157-e7d4-4e94-868c-60ed34629c5e tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 901.222507] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20022d9c-5cf7-45cf-b7d4-5cd5ebf4174a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.390418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b692866-50c4-407b-9124-59c0d7e7af23 tempest-ListServersNegativeTestJSON-322834082 tempest-ListServersNegativeTestJSON-322834082-project-member] Lock "74f94a46-63e4-44e0-9142-7e7d46cd31a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.633s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.431619] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362653, 'name': Rename_Task, 'duration_secs': 0.155796} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.436942] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.438441] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-022f0f03-97f6-44ef-a54c-8b582472a1c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.447030] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 901.447030] env[61905]: value = "task-1362657" [ 901.447030] env[61905]: _type = "Task" [ 901.447030] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.462951] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362657, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.485922] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362654, 'name': Rename_Task, 'duration_secs': 0.170699} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.486563] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 901.487777] env[61905]: DEBUG nova.compute.manager [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Received event network-changed-55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 901.487965] env[61905]: DEBUG nova.compute.manager [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Refreshing instance network info cache due to event network-changed-55782888-9c3d-4f40-852f-9cff30eb514b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 901.488204] env[61905]: DEBUG oslo_concurrency.lockutils [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] Acquiring lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.488354] env[61905]: DEBUG oslo_concurrency.lockutils [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] Acquired lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.488519] env[61905]: DEBUG nova.network.neutron [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Refreshing network info cache for port 55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.491789] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-358e4f4b-b479-472e-a92b-f0f8dd17d3a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.499915] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 901.499915] env[61905]: value = "task-1362658" [ 901.499915] env[61905]: _type = "Task" [ 901.499915] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.516020] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.595115] env[61905]: DEBUG oslo_vmware.api [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Task: {'id': task-1362656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440977} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.595403] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.595587] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.595756] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.595927] env[61905]: INFO nova.compute.manager [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 901.596276] env[61905]: DEBUG oslo.service.loopingcall [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.596682] env[61905]: DEBUG nova.compute.manager [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 901.596790] env[61905]: DEBUG nova.network.neutron [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.684167] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0cc462-4dbb-422e-8e54-cbeb3abbb3b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.693221] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b6c971-37fd-49ea-861b-f1d838b5645c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.733229] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64eaf758-8986-44ba-8056-b42047b5d91e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.748019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2a2664-4103-4228-8f61-f25b84c55cd3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.765321] env[61905]: DEBUG nova.compute.provider_tree [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.957670] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362657, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.010218] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362658, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.268969] env[61905]: DEBUG nova.scheduler.client.report [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.293446] env[61905]: DEBUG nova.network.neutron [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updated VIF entry in instance network info cache for port 55782888-9c3d-4f40-852f-9cff30eb514b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.293828] env[61905]: DEBUG nova.network.neutron [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.457343] env[61905]: DEBUG oslo_vmware.api [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362657, 'name': PowerOnVM_Task, 'duration_secs': 0.65859} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.457675] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.457796] env[61905]: INFO nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Took 8.10 seconds to spawn the instance on the hypervisor. [ 902.457972] env[61905]: DEBUG nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 902.458875] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b972fb84-600d-47c3-a4c1-0004caf7f092 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.511162] env[61905]: DEBUG oslo_vmware.api [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362658, 'name': PowerOnVM_Task, 'duration_secs': 0.603383} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.511446] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.513031] env[61905]: DEBUG nova.compute.manager [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 902.513031] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0adbfb5-b407-48e6-8496-2255d3967b76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.558708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "e9e00459-e685-431b-b194-cf426c7a743e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.558708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.558708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "e9e00459-e685-431b-b194-cf426c7a743e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.559153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.559425] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.567229] env[61905]: INFO nova.compute.manager [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Terminating instance [ 902.569446] env[61905]: DEBUG nova.compute.manager [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 902.569689] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.570556] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe77123-a055-49db-a6ed-b03b682639df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.578827] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.579159] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e69e982-1591-40b4-ac58-8597282f79be {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.585493] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 902.585493] env[61905]: value = "task-1362659" [ 902.585493] env[61905]: _type = "Task" [ 902.585493] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.593898] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362659, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.646390] env[61905]: DEBUG nova.network.neutron [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.774540] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.924s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.778201] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.360s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.778201] env[61905]: DEBUG nova.objects.instance [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'resources' on Instance uuid e3b11ed6-b703-43a6-a528-28520ed43233 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 902.796496] env[61905]: DEBUG oslo_concurrency.lockutils [req-39b7101a-f561-4730-adae-d44642aed8bb req-4cfa379d-11e8-47a7-888a-9fbdd03f9384 service nova] Releasing lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.797894] env[61905]: INFO nova.scheduler.client.report [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Deleted allocations for instance 4bb7a2df-b472-4f6d-8a01-a55d0b86efda [ 902.976174] env[61905]: INFO nova.compute.manager [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Took 29.44 seconds to build instance. [ 903.030604] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.096491] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362659, 'name': PowerOffVM_Task, 'duration_secs': 0.380036} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.096789] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.097063] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.097375] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69d57c8d-e82f-400a-8c77-d468c34e36d3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.149593] env[61905]: INFO nova.compute.manager [-] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Took 1.55 seconds to deallocate network for instance. [ 903.175352] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.175577] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.175764] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleting the datastore file [datastore1] e9e00459-e685-431b-b194-cf426c7a743e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.176051] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5369f4a-ff3c-40b4-a9f5-d38ee9296de8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.182556] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 903.182556] env[61905]: value = "task-1362661" [ 903.182556] env[61905]: _type = "Task" [ 903.182556] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.190629] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362661, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.316820] env[61905]: DEBUG oslo_concurrency.lockutils [None req-932f78b7-62a4-42b7-85ea-ac71ffd524f3 tempest-ServerMetadataTestJSON-7254021 tempest-ServerMetadataTestJSON-7254021-project-member] Lock "4bb7a2df-b472-4f6d-8a01-a55d0b86efda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.136s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.479230] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1371f30d-f9c4-4ffe-b3c1-783c1f9c8349 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.348s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.512117] env[61905]: DEBUG nova.compute.manager [req-e54acddd-1144-409b-81ca-83dc9e2f2d04 req-e5ebc7e9-e93b-4a03-bbcf-33acb963b650 service nova] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Received event network-vif-deleted-c2766fd3-662b-45c8-b9c8-765f49bd1701 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 903.537930] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d22660-f714-40f7-b11f-7929a74bbd2c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.545448] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4022004f-a6c8-4589-a233-895e89d6d655 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.576788] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b589d75a-9b8a-4aba-96f3-0ef203dad1fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.584701] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c542e8-624f-4b76-b75c-982b482578ff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.597892] env[61905]: DEBUG nova.compute.provider_tree [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.656535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.694877] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362661, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.876184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.876463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.876735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.876983] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.877243] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.880346] env[61905]: INFO nova.compute.manager [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Terminating instance [ 903.882683] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "refresh_cache-ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.882781] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquired lock "refresh_cache-ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.883041] env[61905]: DEBUG nova.network.neutron [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] 
[instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.101133] env[61905]: DEBUG nova.scheduler.client.report [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.192325] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.402348] env[61905]: DEBUG nova.network.neutron [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.464703] env[61905]: DEBUG nova.network.neutron [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.606386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.609342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.123s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.610468] env[61905]: INFO nova.compute.claims [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.631802] env[61905]: INFO nova.scheduler.client.report [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance e3b11ed6-b703-43a6-a528-28520ed43233 [ 904.694013] env[61905]: DEBUG oslo_vmware.api [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 
tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.375977} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.694232] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.694435] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.694662] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.694858] env[61905]: INFO nova.compute.manager [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Took 2.13 seconds to destroy the instance on the hypervisor. [ 904.695154] env[61905]: DEBUG oslo.service.loopingcall [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.695390] env[61905]: DEBUG nova.compute.manager [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 904.695962] env[61905]: DEBUG nova.network.neutron [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.968585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Releasing lock "refresh_cache-ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.968585] env[61905]: DEBUG nova.compute.manager [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 904.968585] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 904.969836] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758d56c4-4d7d-40c1-a011-77520c6da2f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.978842] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.979213] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8dc12184-deb5-4a00-b2a6-b57ca4eacd5c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.986751] env[61905]: DEBUG nova.compute.manager [req-65074aa9-243f-4251-a8ad-f307059f2fb4 req-7357a406-466d-4cca-a310-a7774aa5d3d5 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Received event network-vif-deleted-a4b0c26b-f66a-462b-8be3-1f8271de80e6 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.986977] env[61905]: INFO nova.compute.manager [req-65074aa9-243f-4251-a8ad-f307059f2fb4 req-7357a406-466d-4cca-a310-a7774aa5d3d5 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Neutron deleted interface a4b0c26b-f66a-462b-8be3-1f8271de80e6; detaching it from the instance and deleting it from the info cache [ 904.987179] env[61905]: DEBUG nova.network.neutron [req-65074aa9-243f-4251-a8ad-f307059f2fb4 req-7357a406-466d-4cca-a310-a7774aa5d3d5 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.990689] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 904.990689] env[61905]: value = "task-1362662" [ 904.990689] env[61905]: _type = "Task" [ 904.990689] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.003822] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362662, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.058499] env[61905]: DEBUG nova.compute.manager [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 905.059441] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec68d3b1-3265-4cec-8cc0-2aef715ba12c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.138992] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1f53c435-8de0-42fb-96d7-2c7ac3a3641d tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "e3b11ed6-b703-43a6-a528-28520ed43233" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.886s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.457306] env[61905]: DEBUG nova.network.neutron [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.506789] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b627866-ab82-4235-bb0b-3b4df71fc89b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.506789] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362662, 'name': PowerOffVM_Task, 'duration_secs': 0.191935} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.506789] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.506789] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 905.506789] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b755c1ec-b4cf-4481-bbc3-1a552e081ea8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.510438] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df541c5-7ef5-43d2-8e83-2c6a76443d44 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.543879] env[61905]: DEBUG nova.compute.manager [req-65074aa9-243f-4251-a8ad-f307059f2fb4 req-7357a406-466d-4cca-a310-a7774aa5d3d5 service nova] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Detach interface failed, port_id=a4b0c26b-f66a-462b-8be3-1f8271de80e6, reason: Instance e9e00459-e685-431b-b194-cf426c7a743e could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 905.552021] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 905.552021] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 905.552021] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Deleting the datastore file [datastore2] ebf7849c-716f-4b4c-bb9c-42c090d0b3c0 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 905.552021] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8f0b018-0bf0-416d-97b8-8d29c1bc67d6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.560041] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for the task: (returnval){ [ 905.560041] env[61905]: value = "task-1362664" [ 905.560041] env[61905]: _type = "Task" [ 905.560041] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.573189] env[61905]: INFO nova.compute.manager [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] instance snapshotting [ 905.574956] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.578614] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23248f3b-4c4f-4997-9d53-4f0f24c4a363 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.598701] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab51c34f-138a-43a4-a5c0-e8e80d3e557c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.808704] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55092675-7399-4c23-9f1b-dae20abc0323 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.816341] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ce23b9-99c1-45c4-9298-5f3125800cb3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.845195] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc792c8-bfe5-4dfa-bde4-c94b02d86314 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.852340] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634f3d14-81c7-44a7-9f4f-3be134ee1bd8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.866386] env[61905]: DEBUG nova.compute.provider_tree [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.959275] env[61905]: INFO nova.compute.manager [-] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Took 1.26 seconds to deallocate network for instance. [ 906.070130] env[61905]: DEBUG oslo_vmware.api [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Task: {'id': task-1362664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.427661} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.070469] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.070752] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.070952] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.071192] env[61905]: INFO nova.compute.manager [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Took 1.10 seconds to destroy the instance on the hypervisor. [ 906.071473] env[61905]: DEBUG oslo.service.loopingcall [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.071685] env[61905]: DEBUG nova.compute.manager [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.071797] env[61905]: DEBUG nova.network.neutron [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.088793] env[61905]: DEBUG nova.network.neutron [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.097779] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "55a9190b-52f7-4bba-81b0-079e62537183" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.098019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.109417] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Creating Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 906.109893] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9feca6da-c4d5-4a8a-a732-c0a54feadb26 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.117441] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 906.117441] env[61905]: value = "task-1362665" [ 906.117441] env[61905]: _type = "Task" [ 906.117441] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.125327] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.369974] env[61905]: DEBUG nova.scheduler.client.report [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 906.465986] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.591189] env[61905]: DEBUG nova.network.neutron [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.599943] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 906.626686] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.875955] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.876577] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 906.879215] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.656s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.093655] env[61905]: INFO nova.compute.manager [-] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Took 1.02 seconds to deallocate network for instance. 
[ 907.122775] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.128931] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.384022] env[61905]: DEBUG nova.compute.utils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.390053] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 907.390409] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.437914] env[61905]: DEBUG nova.policy [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '947946764fc64847946057d867de54bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '980cc259c0254e84989e0cfc0e45837f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 907.600291] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.627818] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.681628] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Successfully created port: 3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.890778] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 907.919218] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 12c21d8e-1941-4481-9216-015ba6c09b9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.919453] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance a6e45dd1-e0ee-4bda-9513-4b1000e15e49 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 907.919589] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 38b80675-182a-422c-9222-aa78ed59c351 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 907.919717] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 2c919b69-0e09-431d-8a75-98d5740c7dab is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 907.919834] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1502df44-9166-4ce8-9117-a57e7be2d299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.919954] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 299479fb-9a94-40b8-928d-8e491dbe1af1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 907.920079] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0f7ccb34-cb14-4b21-ae61-b066427d400e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.920819] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e9e00459-e685-431b-b194-cf426c7a743e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 907.921055] env[61905]: WARNING nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 27c3ed56-d24e-47d1-9c39-43b3b88a59b9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 907.921197] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance ebf7849c-716f-4b4c-bb9c-42c090d0b3c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.921323] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 7b0db0a2-c990-4160-9be8-018239425114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.921440] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 22b6d87c-08c5-492c-a963-f7ad6ef5db5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 907.921888] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance b9400202-eb37-4c75-bbf3-807edb7bc16f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 908.128342] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.424511] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 4b1723a2-94a2-4070-9b47-85c9c8169137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 908.628073] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.906220] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 908.927102] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 63eb2219-fea2-4af0-90d2-e8d9ac53a138 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 908.931030] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 908.931030] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 908.931227] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.931340] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 908.931490] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.931639] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 908.931851] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 908.932026] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 908.932253] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 908.932484] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 908.932787] env[61905]: DEBUG nova.virt.hardware [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.933807] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c84d7b-97a2-40d8-b9e4-e8be5a572586 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.942826] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4afa2f0-87c7-4928-a1bf-8d5985815eb7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.099638] env[61905]: DEBUG nova.compute.manager [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Received event network-vif-plugged-3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.099735] env[61905]: DEBUG oslo_concurrency.lockutils [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event"
{{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.099933] env[61905]: DEBUG oslo_concurrency.lockutils [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.100135] env[61905]: DEBUG oslo_concurrency.lockutils [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.100336] env[61905]: DEBUG nova.compute.manager [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] No waiting events found dispatching network-vif-plugged-3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 909.100513] env[61905]: WARNING nova.compute.manager [req-59e829d3-d2ff-4e2b-af1d-98eb3adc9204 req-deff52f0-1bea-4c45-a2d8-4a69d95f67cd service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Received unexpected event network-vif-plugged-3802415e-d978-40f5-8265-2e03cbdd0814 for instance with vm_state building and task_state spawning. [ 909.130808] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362665, 'name': CreateSnapshot_Task, 'duration_secs': 2.940677} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.131175] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Created Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 909.132250] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6207e903-25fa-4017-9730-6ada9d483ad2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.438244] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 55a9190b-52f7-4bba-81b0-079e62537183 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 909.438485] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 909.438634] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 909.629075] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d3c848-f9b1-4568-a4d1-fb0f9155ad0c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.636202] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e451397-5bca-478a-83a2-3f9e2bcded8a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.675111] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Creating linked-clone VM from snapshot {{(pid=61905) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 909.676344] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Successfully updated port: 3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.677538] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4ad48d80-cba6-413e-9543-1cfa4d00afc9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.680981] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b660eb-0781-4d55-83e8-2b3b3295b69a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.685443] env[61905]: DEBUG nova.compute.manager [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Received event network-changed-3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.685608] env[61905]: DEBUG nova.compute.manager [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Refreshing instance network info cache due to event network-changed-3802415e-d978-40f5-8265-2e03cbdd0814. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 909.685826] env[61905]: DEBUG oslo_concurrency.lockutils [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] Acquiring lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.685970] env[61905]: DEBUG oslo_concurrency.lockutils [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] Acquired lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.686154] env[61905]: DEBUG nova.network.neutron [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Refreshing network info cache for port 3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 909.694047] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a2cc50-6e2b-45c6-9c87-59d6e7870cbf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.698107] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 909.698107] env[61905]: value = "task-1362666" [ 909.698107] env[61905]: _type = "Task" [ 909.698107] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.709885] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.715559] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362666, 'name': CloneVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.185632] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.209046] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362666, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.212424] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 910.218302] env[61905]: DEBUG nova.network.neutron [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.292050] env[61905]: DEBUG nova.network.neutron [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.711136] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362666, 'name': CloneVM_Task} progress is 94%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.717055] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 910.717055] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.838s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.717351] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.825s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.717555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.719527] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.186s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.719718] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.721381] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.177s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.721561] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.723075] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.613s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.724475] env[61905]: INFO nova.compute.claims [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.747876] env[61905]: INFO nova.scheduler.client.report [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted allocations for instance 38b80675-182a-422c-9222-aa78ed59c351 [ 910.751185] env[61905]: INFO nova.scheduler.client.report [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted allocations for instance 2c919b69-0e09-431d-8a75-98d5740c7dab [ 910.767431] env[61905]: INFO nova.scheduler.client.report [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocations for instance a6e45dd1-e0ee-4bda-9513-4b1000e15e49 [ 910.795041] env[61905]: DEBUG oslo_concurrency.lockutils [req-239bf717-35d2-4865-b926-7a4821d28f3a req-1d9fb3bb-f1e0-4a2a-a8f4-6b01f02ff88a service nova] Releasing lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.795638] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 
tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.795804] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.210348] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362666, 'name': CloneVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.263425] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fdc511d5-b499-4fa2-8d0b-65dc9d791dca tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "38b80675-182a-422c-9222-aa78ed59c351" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.934s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.264450] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7161b4be-7d03-4d85-a7f6-fc242a16a967 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "2c919b69-0e09-431d-8a75-98d5740c7dab" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 33.853s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.274386] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cec6c7f6-b0e4-4a0c-9665-9702e905551a tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "a6e45dd1-e0ee-4bda-9513-4b1000e15e49" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 35.647s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.324608] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance cache missing network info.
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.454817] env[61905]: DEBUG nova.network.neutron [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [{"id": "3802415e-d978-40f5-8265-2e03cbdd0814", "address": "fa:16:3e:3c:3e:e6", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3802415e-d9", "ovs_interfaceid": "3802415e-d978-40f5-8265-2e03cbdd0814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.710315] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362666, 'name': CloneVM_Task, 'duration_secs': 1.59569} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.710579] env[61905]: INFO nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Created linked-clone VM from snapshot [ 911.711317] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6633a8b6-1666-46d9-917f-3745cbc02e52 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.718778] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Uploading image 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 911.741385] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 911.741385] env[61905]: value = "vm-290066" [ 911.741385] env[61905]: _type = "VirtualMachine" [ 911.741385] env[61905]: }. 
{{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 911.741624] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e7de7276-df0e-4f1a-b09c-631ecf81c82a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.748882] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease: (returnval){ [ 911.748882] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52160285-b5b6-31cb-3fce-3212e7cc68eb" [ 911.748882] env[61905]: _type = "HttpNfcLease" [ 911.748882] env[61905]: } obtained for exporting VM: (result){ [ 911.748882] env[61905]: value = "vm-290066" [ 911.748882] env[61905]: _type = "VirtualMachine" [ 911.748882] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 911.749136] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the lease: (returnval){ [ 911.749136] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52160285-b5b6-31cb-3fce-3212e7cc68eb" [ 911.749136] env[61905]: _type = "HttpNfcLease" [ 911.749136] env[61905]: } to be ready. {{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 911.758483] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 911.758483] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52160285-b5b6-31cb-3fce-3212e7cc68eb" [ 911.758483] env[61905]: _type = "HttpNfcLease" [ 911.758483] env[61905]: } is initializing. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 911.906760] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1973a6-0a48-4efa-a1fe-a3c460681c75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.914509] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9eedef-baf5-4140-9b1a-d8444084bf6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.944687] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77aeeec8-e558-4566-a0e4-7e47c22cb4ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.952191] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1843dce0-47c6-4411-bf35-3e892766f63e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.957576] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.957899] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance network_info: |[{"id": "3802415e-d978-40f5-8265-2e03cbdd0814", "address": "fa:16:3e:3c:3e:e6", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3802415e-d9", "ovs_interfaceid": "3802415e-d978-40f5-8265-2e03cbdd0814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 911.965905] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:3e:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3802415e-d978-40f5-8265-2e03cbdd0814', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.973047] env[61905]: DEBUG oslo.service.loopingcall [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 911.973537] env[61905]: DEBUG nova.compute.provider_tree [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.974945] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.975867] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c7cb61d-c07f-4ba1-a98e-9288e3f0e5bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.996316] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.996316] env[61905]: value = "task-1362668" [ 911.996316] env[61905]: _type = "Task" [ 911.996316] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.004339] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362668, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.136951] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.137350] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.166779] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.167022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.261881] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 912.261881] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52160285-b5b6-31cb-3fce-3212e7cc68eb" [ 912.261881] env[61905]: _type = "HttpNfcLease" [ 912.261881] env[61905]: } is ready. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 912.262627] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 912.262627] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52160285-b5b6-31cb-3fce-3212e7cc68eb" [ 912.262627] env[61905]: _type = "HttpNfcLease" [ 912.262627] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 912.263181] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0823c3ae-3601-4f33-a89d-d97dc54e8a24 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.279600] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk from lease info. 
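Once the lease reaches 'ready', _find_vmdk_url (rw_handles.py, logged just above) reads HttpNfcLease.info and picks the device URL flagged as a disk; that URL is what _create_read_connection then opens for the streamOptimized upload to Glance. A sketch of that lookup, reusing session and lease from the export sketch earlier.

    from oslo_vmware import vim_util

    # 'session' and 'lease' as in the export sketch above.
    lease_info = session.invoke_api(
        vim_util, 'get_object_property', session.vim, lease, 'info')

    # HttpNfcLeaseInfo.deviceUrl is a list of HttpNfcLeaseDeviceUrl
    # entries; the exported VMDK is the one with disk=True.
    vmdk_url = next(dev.url for dev in lease_info.deviceUrl if dev.disk)
    # e.g. https://<esx-host>/nfc/<ticket>/disk-0.vmdk, as found above.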
{{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 912.279811] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk for reading. {{(pid=61905) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 912.383380] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b76261d7-cdfe-4f06-a13b-b5723b6f89e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.477197] env[61905]: DEBUG nova.scheduler.client.report [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.506683] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362668, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.641026] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.669767] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 912.983501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.260s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.984091] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 912.987727] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.011s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.987958] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.991606] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.991807] env[61905]: INFO nova.compute.claims [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.006820] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362668, 'name': CreateVM_Task, 'duration_secs': 0.568958} completed successfully. 
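The recurring 'Acquiring lock ... by ...' / 'Lock ... acquired/released ... waited/held' lines throughout this log come from oslo_concurrency.lockutils, which Nova uses both as a context manager and as a decorator. A minimal sketch of both forms; the lock name matches the "compute_resources" lock seen above, and the function bodies are placeholders.

    from oslo_concurrency import lockutils

    # Context-manager form, as used for the "refresh_cache-..." and
    # datastore image-cache locks in this log:
    with lockutils.lock('compute_resources'):
        ...  # critical section, e.g. a resource claim

    # Decorator form; the "acquired by"/"released by" log lines name
    # the decorated callable:
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        ...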
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.007206] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 913.008419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.008419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.008419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 913.008646] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76f63be6-deb4-44a2-b510-e1748a92b21b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.013347] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 913.013347] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e1957c-e528-4abf-5ceb-e1ff9daa2f82" [ 913.013347] env[61905]: _type = "Task" [ 913.013347] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.021339] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e1957c-e528-4abf-5ceb-e1ff9daa2f82, 'name': SearchDatastore_Task} progress is 0%. 
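task-1362668, which just completed above ("Created VM on the ESX host"), is a Folder.CreateVM_Task invoked earlier and polled until done. A sketch of the invoke/wait pair as nova.virt.vmwareapi.vm_util drives it; folder_ref, config_spec and pool_ref stand in for the values Nova builds from the VIF info and flavor.

    # 'session' as in the first sketch; folder_ref, config_spec and
    # pool_ref are placeholders for the morefs/spec Nova constructs.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)

    # wait_for_task polls Task.info (the "progress is 0%/99%" lines)
    # and returns task.info once the state is 'success', else raises.
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result   # moref of the new VirtualMachine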
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.035677] env[61905]: INFO nova.scheduler.client.report [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Deleted allocations for instance 299479fb-9a94-40b8-928d-8e491dbe1af1 [ 913.166066] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.191219] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.504518] env[61905]: DEBUG nova.compute.utils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 913.512026] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 913.512026] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 913.528299] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e1957c-e528-4abf-5ceb-e1ff9daa2f82, 'name': SearchDatastore_Task, 'duration_secs': 0.010944} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.528913] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.528913] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.529184] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.529466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.529713] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 913.530021] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4681525d-799a-4351-855c-4b72caefad97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.542264] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 913.542628] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 913.543710] env[61905]: DEBUG oslo_concurrency.lockutils [None req-71867cdf-7401-4dc6-88f4-0a2299f8c9a7 tempest-ServerAddressesNegativeTestJSON-1562782 tempest-ServerAddressesNegativeTestJSON-1562782-project-member] Lock "299479fb-9a94-40b8-928d-8e491dbe1af1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.155s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.548261] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113598bc-04fb-4e1c-9536-12bd1e8f41d7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.554385] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 913.554385] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d67ccc-f9c8-b82a-901c-bcb04abea7f0" [ 913.554385] env[61905]: _type = "Task" [ 913.554385] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.563928] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d67ccc-f9c8-b82a-901c-bcb04abea7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.584358] env[61905]: DEBUG nova.policy [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 914.017906] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 914.066121] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d67ccc-f9c8-b82a-901c-bcb04abea7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010153} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.067908] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d95576d6-b51c-4b5b-999a-094223eb65c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.076930] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 914.076930] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527814aa-3b7f-b63f-17df-51f1883d8b03" [ 914.076930] env[61905]: _type = "Task" [ 914.076930] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.092347] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527814aa-3b7f-b63f-17df-51f1883d8b03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.261414] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745a2775-60ca-4a1f-a78d-de1012f01f78 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.269724] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ff6356-c8fb-4e81-b637-f209a884e677 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.315208] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90741814-f106-43e2-a8c5-9a64e3909b0c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.323195] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f47002-86db-48e0-9ed6-6a3f4fec2894 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.340048] env[61905]: DEBUG nova.compute.provider_tree [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.351130] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Successfully created port: f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.589084] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527814aa-3b7f-b63f-17df-51f1883d8b03, 'name': SearchDatastore_Task, 'duration_secs': 
0.013114} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.589528] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.590402] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 914.590402] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ee12fca-5839-462c-9038-b8f70dfef641 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.596539] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 914.596539] env[61905]: value = "task-1362669" [ 914.596539] env[61905]: _type = "Task" [ 914.596539] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.605647] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.844169] env[61905]: DEBUG nova.scheduler.client.report [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.028995] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Start spawning the instance on the hypervisor. 
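The CopyVirtualDisk_Task just started above (task-1362669) copies the cached image VMDK into the instance directory via VirtualDiskManager.CopyVirtualDisk_Task. A sketch using the datastore paths from the log; dc_ref, the Datacenter moref, is a placeholder.

    # 'session' as in the first sketch; dc_ref is a placeholder.
    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore2] devstack-image-cache_base/'
           '4d166298-c700-4bc6-8f8f-67684a277053/'
           '4d166298-c700-4bc6-8f8f-67684a277053.vmdk')
    dst = ('[datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/'
           'b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk')
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst)
    session.wait_for_task(task)   # the CopyVirtualDisk_Task polled above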
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 915.059023] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 915.059759] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 915.060098] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.060420] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 915.060639] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.060849] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 915.061159] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 915.061562] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 915.061774] env[61905]: DEBUG nova.virt.hardware [None 
req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 915.062069] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 915.062389] env[61905]: DEBUG nova.virt.hardware [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 915.063394] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66845c39-bf05-4556-bc6d-ed2ac44b88f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.073166] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3177e24c-6e25-4b95-a0f2-35216190ba64 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.111919] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362669, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.352788] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.353155] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Start building networks asynchronously for instance. 
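The nova.virt.hardware walk above (limits 65536:65536:65536, preferences 0:0:0, one vCPU) necessarily ends with the single topology 1:1:1. A toy re-derivation of the "possible topologies" step — illustrative only, not Nova's actual implementation.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals
        vcpus, mirroring the 'Build topologies for 1 vcpu(s) 1:1:1' step."""
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], as logged above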
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 915.356378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.325s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.356378] env[61905]: DEBUG nova.objects.instance [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 915.607807] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577374} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.608128] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 915.608425] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 915.608752] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddaf9027-5b2b-4398-a984-d26dcce909ac {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.615133] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 915.615133] env[61905]: value = "task-1362670" [ 915.615133] env[61905]: _type = "Task" [ 915.615133] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.623308] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362670, 'name': ExtendVirtualDisk_Task} progress is 0%. 
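"Extending root virtual disk to 1048576" above is in kilobytes: 1048576 KB is 1 GiB, matching the flavor's root_gb=1. The extension is a VirtualDiskManager.ExtendVirtualDisk_Task; a sketch with the log's VMDK path, dc_ref again a placeholder.

    # 'session' as in the first sketch; dc_ref is a placeholder.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/'
             'b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk',
        datacenter=dc_ref,
        newCapacityKb=1 * 1024 * 1024,   # root_gb=1 -> 1048576 KB
        eagerZero=False)
    session.wait_for_task(task)   # task-1362670 in the log above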
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.762689] env[61905]: DEBUG nova.compute.manager [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received event network-vif-plugged-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.762882] env[61905]: DEBUG oslo_concurrency.lockutils [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.763087] env[61905]: DEBUG oslo_concurrency.lockutils [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.763234] env[61905]: DEBUG oslo_concurrency.lockutils [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.763404] env[61905]: DEBUG nova.compute.manager [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] No waiting events found dispatching network-vif-plugged-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 915.763575] env[61905]: WARNING nova.compute.manager [req-2fd763de-9877-4f8d-ac17-329f9ae0aa70 req-2f889415-d48a-44d1-87bd-08516e17df52 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received unexpected event network-vif-plugged-f8651682-560e-4a78-8a0a-bd0024272caa for instance with vm_state building and task_state spawning. [ 915.860686] env[61905]: DEBUG nova.compute.utils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 915.864985] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Allocating IP information in the background. 
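The WARNING above is the early-event case: Neutron delivered network-vif-plugged-f8651682... before the spawn path registered a waiter for it, so pop_instance_event found nothing pending for the still-building instance and the event was dropped. A toy illustration of that pop-event registry pattern; the names and structure here are hypothetical, not Nova's code.

    import threading
    from collections import defaultdict

    # Toy registry: real Nova keeps per-instance event maps guarded by
    # the "<uuid>-events" lock seen above. Names here are hypothetical.
    _waiters = defaultdict(dict)

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        _waiters[instance_uuid][event_name] = ev
        return ev

    def pop_instance_event(instance_uuid, event_name):
        ev = _waiters[instance_uuid].pop(event_name, None)
        if ev is None:
            # No waiter registered yet: this is the "Received unexpected
            # event network-vif-plugged-..." WARNING in the log.
            return None
        ev.set()
        return ev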
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 915.866414] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 915.868262] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Successfully updated port: f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.908629] env[61905]: DEBUG nova.policy [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 916.124958] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064515} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.125435] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.126309] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f74a91-6315-4290-b130-ec28ca4002f3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.148701] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.149141] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d59a69c-75f8-40ed-9fcb-ea89aa84b2af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.168742] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 916.168742] env[61905]: value = "task-1362671" [ 916.168742] env[61905]: _type = "Task" [ 916.168742] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.179489] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362671, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.265889] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Successfully created port: d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.368237] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Start building block device mappings for instance. 
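task-1362671 above reconfigures the VM to attach the copied VMDK: a VirtualMachine.ReconfigVM_Task carrying a VirtualMachineConfigSpec whose deviceChange adds the disk. A sketch of the invoke/wait pair; building the full device spec (backing, controller key, unit number) is elided.

    # 'session' and 'vm_ref' as in the sketches above.
    client_factory = session.vim.client.factory
    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    # config_spec.deviceChange would carry a VirtualDeviceConfigSpec
    # adding the disk; that construction is elided here.

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)   # the ReconfigVM_Task polled above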
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-74528ba1-4454-437a-b2fb-280d7b645b17 tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.377418] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.718s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.377418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.911s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.377798] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.380077] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.257s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.380887] env[61905]: INFO nova.compute.claims [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.421725] env[61905]: INFO nova.scheduler.client.report [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleted allocations for instance e9e00459-e685-431b-b194-cf426c7a743e [ 916.503273] env[61905]: INFO nova.scheduler.client.report [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Deleted allocations for instance 27c3ed56-d24e-47d1-9c39-43b3b88a59b9 [ 916.680030] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362671, 'name': ReconfigVM_Task, 'duration_secs': 0.492648} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.680030] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfigured VM instance instance-00000053 to attach disk [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 916.680508] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4b67f3d-5531-4a1d-8583-49f1fee94238 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.686823] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 916.686823] env[61905]: value = "task-1362672" [ 916.686823] env[61905]: _type = "Task" [ 916.686823] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.700408] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362672, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.920711] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.929887] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d4586ddb-e15c-4b4f-a87f-7af2fee65d05 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "e9e00459-e685-431b-b194-cf426c7a743e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.371s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.013050] env[61905]: DEBUG oslo_concurrency.lockutils [None req-acf12311-0183-4dc6-9f22-0c98def18e74 tempest-InstanceActionsTestJSON-1472394411 tempest-InstanceActionsTestJSON-1472394411-project-member] Lock "27c3ed56-d24e-47d1-9c39-43b3b88a59b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.564s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.109020] env[61905]: DEBUG nova.network.neutron [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updating instance_info_cache with network_info: [{"id": "f8651682-560e-4a78-8a0a-bd0024272caa", "address": "fa:16:3e:f8:aa:04", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8651682-56", "ovs_interfaceid": "f8651682-560e-4a78-8a0a-bd0024272caa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.198128] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362672, 'name': Rename_Task, 'duration_secs': 0.2051} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.198684] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.198684] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec49a880-317b-4e5f-810f-b4a113d0fbbe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.204917] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 917.204917] env[61905]: value = "task-1362673" [ 917.204917] env[61905]: _type = "Task" [ 917.204917] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.218022] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.383112] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 917.408403] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 917.408697] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 917.408697] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.409668] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 917.409668] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.409668] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 917.409668] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 917.409668] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 917.409668] 
env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 917.410129] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 917.410129] env[61905]: DEBUG nova.virt.hardware [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.411139] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ffee69-1d1c-4d49-bd1e-eee360fbe387 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.422424] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c51199-a6af-4cbe-bd42-6f7719f651af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.538831] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.538959] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.539104] env[61905]: INFO nova.compute.manager [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Shelving [ 917.564370] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36af1fc8-1f3a-4bf6-9a5c-3ce8803e1e8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.572662] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfc85d9-a00e-4216-9893-ddb13ae9f81d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.605392] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f3aaea-dca4-422e-8f06-6c17c5d29234 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.612634] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.612948] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance network_info: |[{"id": "f8651682-560e-4a78-8a0a-bd0024272caa", "address": "fa:16:3e:f8:aa:04", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8651682-56", "ovs_interfaceid": "f8651682-560e-4a78-8a0a-bd0024272caa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 917.613619] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:aa:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8651682-560e-4a78-8a0a-bd0024272caa', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.621050] env[61905]: DEBUG oslo.service.loopingcall [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.622317] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba8e222-81ac-4844-a418-e4b5a7477e3b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.626594] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.626844] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c54fb17c-d5cc-4982-822d-cd201b647d3d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.652674] env[61905]: DEBUG nova.compute.provider_tree [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.655267] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.655267] env[61905]: value = "task-1362674" [ 917.655267] env[61905]: _type = "Task" [ 917.655267] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.663974] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362674, 'name': CreateVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.715758] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362673, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.790539] env[61905]: DEBUG nova.compute.manager [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received event network-changed-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.790738] env[61905]: DEBUG nova.compute.manager [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Refreshing instance network info cache due to event network-changed-f8651682-560e-4a78-8a0a-bd0024272caa. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.790992] env[61905]: DEBUG oslo_concurrency.lockutils [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] Acquiring lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.791118] env[61905]: DEBUG oslo_concurrency.lockutils [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] Acquired lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.791269] env[61905]: DEBUG nova.network.neutron [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Refreshing network info cache for port f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 917.828189] env[61905]: DEBUG nova.compute.manager [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-vif-plugged-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.828425] env[61905]: DEBUG oslo_concurrency.lockutils [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.828721] env[61905]: DEBUG oslo_concurrency.lockutils [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.828987] env[61905]: DEBUG oslo_concurrency.lockutils [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.829317] env[61905]: DEBUG nova.compute.manager [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] No waiting events found dispatching network-vif-plugged-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 917.829549] env[61905]: WARNING nova.compute.manager [req-4953087c-a40e-43c0-980c-f65bdd4199e5 req-8b05eb27-1055-4836-9cfc-369d5e8879f2 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received unexpected event network-vif-plugged-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 for instance with vm_state building and task_state spawning. 
[ 917.925110] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Successfully updated port: d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 917.937690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "12c21d8e-1941-4481-9216-015ba6c09b9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.937690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.937690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.937690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.937690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.940107] env[61905]: INFO nova.compute.manager [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Terminating instance [ 917.942455] env[61905]: DEBUG nova.compute.manager [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 917.945494] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.945494] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46583d48-4046-412f-8a4f-741ccf6f771d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.953853] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.954158] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa4c4c1a-d8f0-46ef-b606-0c40d557ec32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.961465] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 917.961465] env[61905]: value = "task-1362675" [ 917.961465] env[61905]: _type = "Task" [ 917.961465] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.970437] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362675, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.047614] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.048207] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-309c1175-c3db-45cc-bcf5-92c75dacf58e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.055592] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 918.055592] env[61905]: value = "task-1362676" [ 918.055592] env[61905]: _type = "Task" [ 918.055592] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.065494] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362676, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.156980] env[61905]: DEBUG nova.scheduler.client.report [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.170730] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362674, 'name': CreateVM_Task, 'duration_secs': 0.347362} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.170941] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 918.171749] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.171894] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.172778] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 918.172778] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af33b29a-230e-43ea-9029-5ae0cc8b0fa4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.178775] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 918.178775] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e4b31-239a-769e-a012-bc2a85d5fe2f" [ 918.178775] env[61905]: _type = "Task" [ 918.178775] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.187615] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e4b31-239a-769e-a012-bc2a85d5fe2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.216295] env[61905]: DEBUG oslo_vmware.api [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362673, 'name': PowerOnVM_Task, 'duration_secs': 0.566729} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.216663] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.216844] env[61905]: INFO nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Took 9.31 seconds to spawn the instance on the hypervisor. [ 918.217171] env[61905]: DEBUG nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 918.218012] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952b80d8-0ab3-4e1d-9978-87a5953da48a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.428712] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.428863] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.429012] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.472118] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 
tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362675, 'name': PowerOffVM_Task, 'duration_secs': 0.345924} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.472449] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.472635] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.476499] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba4f56c7-fd50-43d3-b512-d479a888bc7f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.543610] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.543841] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.544039] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleting the datastore file [datastore2] 12c21d8e-1941-4481-9216-015ba6c09b9b {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.544299] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65d139f2-cf4f-4bdc-8453-e01a5b8c11b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.551935] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for the task: (returnval){ [ 918.551935] env[61905]: value = "task-1362678" [ 918.551935] env[61905]: _type = "Task" [ 918.551935] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.562722] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362678, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.568302] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362676, 'name': PowerOffVM_Task, 'duration_secs': 0.195707} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.568739] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.570174] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42f40a3-6e1d-400f-98bf-cc4bc2d80cb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.590771] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f95448a-a0ec-4c20-b866-1146122ecdbb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.665454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.666507] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 918.672625] env[61905]: DEBUG nova.network.neutron [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updated VIF entry in instance network info cache for port f8651682-560e-4a78-8a0a-bd0024272caa. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 918.672625] env[61905]: DEBUG nova.network.neutron [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updating instance_info_cache with network_info: [{"id": "f8651682-560e-4a78-8a0a-bd0024272caa", "address": "fa:16:3e:f8:aa:04", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8651682-56", "ovs_interfaceid": "f8651682-560e-4a78-8a0a-bd0024272caa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.672625] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.071s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.672625] env[61905]: DEBUG nova.objects.instance [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lazy-loading 'resources' on Instance uuid ebf7849c-716f-4b4c-bb9c-42c090d0b3c0 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.691539] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e4b31-239a-769e-a012-bc2a85d5fe2f, 'name': SearchDatastore_Task, 'duration_secs': 0.0246} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.691886] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.692191] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.692981] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.692981] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.692981] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.693218] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd8f7c82-9885-4fba-8856-51aae88c0ebf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.704591] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.704823] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.705568] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb64b971-12ac-4aa9-b35e-db6a70fd0ec8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.711850] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 918.711850] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a3de87-5d21-517e-eb84-f6a14a5b9cf6" [ 918.711850] env[61905]: _type = "Task" [ 918.711850] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.721040] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a3de87-5d21-517e-eb84-f6a14a5b9cf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.735596] env[61905]: INFO nova.compute.manager [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Took 41.28 seconds to build instance. [ 918.981909] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.061115] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.103280] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Creating Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 919.103280] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-24a98424-f126-42ab-8b51-07182accb074 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.111413] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 919.111413] env[61905]: value = "task-1362679" [ 919.111413] env[61905]: _type = "Task" [ 919.111413] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.120387] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362679, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.174527] env[61905]: DEBUG nova.compute.utils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 919.176972] env[61905]: DEBUG oslo_concurrency.lockutils [req-6fe1ebef-a8fd-4c45-8a00-719a4aa3183c req-7a65309d-5b10-497a-a54a-af958e578af7 service nova] Releasing lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.177968] env[61905]: DEBUG nova.network.neutron [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.181899] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 919.182561] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.226829] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a3de87-5d21-517e-eb84-f6a14a5b9cf6, 'name': SearchDatastore_Task, 'duration_secs': 0.021125} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.228031] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28afdd7f-8b71-4af3-ac6a-de015cbde135 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.233618] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 919.233618] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a27ea-f662-3f7b-74b3-09d61a509fab" [ 919.233618] env[61905]: _type = "Task" [ 919.233618] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.237497] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f792f10d-9fcb-4e1f-a2cb-f73c6fd2d775 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.790s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.242799] env[61905]: DEBUG nova.policy [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 919.248460] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a27ea-f662-3f7b-74b3-09d61a509fab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.405605] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d0ecbc-77a1-4d64-ab05-9224cdaf1a0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.414296] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdab552-cad2-4839-a9b9-9d7f280b4c08 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.450815] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d42237-d419-4b05-b188-be2f82beca5c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.458946] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feee367-ea16-4f70-819a-05fc74632947 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.474029] env[61905]: DEBUG nova.compute.provider_tree [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.564239] env[61905]: DEBUG oslo_vmware.api [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Task: {'id': task-1362678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.589941} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.564588] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.564840] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.565010] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.565291] env[61905]: INFO nova.compute.manager [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Took 1.62 seconds to destroy the instance on the hypervisor. 
[ 919.565576] env[61905]: DEBUG oslo.service.loopingcall [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.565817] env[61905]: DEBUG nova.compute.manager [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 919.565915] env[61905]: DEBUG nova.network.neutron [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.621997] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362679, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.671816] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Successfully created port: ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.677757] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 919.686289] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.686775] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Instance network_info: |[{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 919.687354] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:fc:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9c4edd5-d88e-4996-afea-00130ace0dad', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1260d42-8ebd-4227-91b1-e34c80b3bdb0', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.700479] env[61905]: DEBUG oslo.service.loopingcall [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
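
The pair of records above shows the driver flattening Neutron's network_info model into the small "VIF info" dict the VMware layer consumes (bridge, MAC, opaque-network reference, vmxnet3 model). A simplified, hypothetical version of that mapping, with the field names taken from the log output rather than from Nova's source:

    # Hypothetical helper: condense one Neutron VIF entry into the
    # VMware VIF-info shape logged above. Simplified, not Nova's code.
    def vif_to_vmware_info(vif, vif_model='vmxnet3'):
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],   # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {'type': 'OpaqueNetwork',
                            'network-id': details.get('nsx-logical-switch-id'),
                            'network-type': 'nsx.LogicalSwitch',
                            'use-external-id': True},
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }

    vif = {'id': 'd1260d42-8ebd-4227-91b1-e34c80b3bdb0',
           'address': 'fa:16:3e:3d:fc:06',
           'network': {'bridge': 'br-int'},
           'details': {'nsx-logical-switch-id':
                       'f9c4edd5-d88e-4996-afea-00130ace0dad'}}
    print(vif_to_vmware_info(vif))
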
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.700841] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.701244] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f99f5399-6daf-4aa2-88be-e07a3aa16255 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.723666] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.723666] env[61905]: value = "task-1362680" [ 919.723666] env[61905]: _type = "Task" [ 919.723666] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.739247] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362680, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.751257] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a27ea-f662-3f7b-74b3-09d61a509fab, 'name': SearchDatastore_Task, 'duration_secs': 0.024358} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.751811] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.751954] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 4b1723a2-94a2-4070-9b47-85c9c8169137/4b1723a2-94a2-4070-9b47-85c9c8169137.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 919.752740] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea4cf760-36b8-48d5-8719-4f135b1e566c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.759785] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 919.759785] env[61905]: value = "task-1362681" [ 919.759785] env[61905]: _type = "Task" [ 919.759785] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.771929] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362681, 'name': CopyVirtualDisk_Task} progress is 0%. 
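
The CopyVirtualDisk_Task above clones the cached image VMDK into the new instance's directory on the same datastore. Roughly the same call, made directly through oslo.vmware (reusing a session like the one sketched earlier; dc_ref and both paths are placeholders):

    # Sketch: copy a cached image VMDK to an instance path and wait.
    # 'session' as sketched earlier; dc_ref and the paths are placeholders.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/IMG/IMG.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore2] INSTANCE/INSTANCE.vmdk',
        destDatacenter=dc_ref)
    session.wait_for_task(task)  # the 0% -> 51% -> done polling above
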
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.883930] env[61905]: DEBUG nova.compute.manager [req-ccc605b2-21b2-470d-ac26-3eeda01f36ad req-279d949c-f11d-4db1-a431-41b093819c98 service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Received event network-vif-deleted-8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.884754] env[61905]: INFO nova.compute.manager [req-ccc605b2-21b2-470d-ac26-3eeda01f36ad req-279d949c-f11d-4db1-a431-41b093819c98 service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Neutron deleted interface 8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41; detaching it from the instance and deleting it from the info cache [ 919.884981] env[61905]: DEBUG nova.network.neutron [req-ccc605b2-21b2-470d-ac26-3eeda01f36ad req-279d949c-f11d-4db1-a431-41b093819c98 service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.909087] env[61905]: DEBUG nova.compute.manager [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.909087] env[61905]: DEBUG nova.compute.manager [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing instance network info cache due to event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 919.909087] env[61905]: DEBUG oslo_concurrency.lockutils [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.909287] env[61905]: DEBUG oslo_concurrency.lockutils [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.909537] env[61905]: DEBUG nova.network.neutron [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.977793] env[61905]: DEBUG nova.scheduler.client.report [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 920.124230] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362679, 'name': CreateSnapshot_Task, 'duration_secs': 0.835783} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.124610] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Created Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 920.125649] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03398c2c-bc8e-4af5-ae9d-abcf9eed50ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.235848] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362680, 'name': CreateVM_Task, 'duration_secs': 0.369443} completed successfully. 
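
The inventory dict above is what the resource tracker reports to Placement. For each resource class the schedulable capacity is (total - reserved) * allocation_ratio, which is why a 48-VCPU host with a 4.0 ratio can back 192 vCPUs of instances. Checking the log's numbers:

    # Placement capacity: (total - reserved) * allocation_ratio,
    # using the figures from the inventory record above.
    def capacity(total, reserved, allocation_ratio):
        return int((total - reserved) * allocation_ratio)

    print(capacity(48, 0, 4.0))        # VCPU      -> 192
    print(capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078
    print(capacity(400, 0, 1.0))       # DISK_GB   -> 400
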
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.236791] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.237611] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.237786] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.238182] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 920.238676] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac4e79fe-936c-4a0d-b01b-28b50c486519 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.243815] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 920.243815] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524c1a4a-2401-e4d4-1acf-20694f240eee" [ 920.243815] env[61905]: _type = "Task" [ 920.243815] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.252160] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524c1a4a-2401-e4d4-1acf-20694f240eee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.269550] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362681, 'name': CopyVirtualDisk_Task} progress is 51%. 
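
The Acquiring/Acquired/Releasing lines around "[datastore2] devstack-image-cache_base/..." are oslo.concurrency's named-lock pattern: every worker touching a cached image serializes on the image's datastore path. The same primitive in isolation (the lock name mirrors the log; the critical section is a stand-in):

    # The named-lock pattern from oslo.concurrency seen above; the body
    # of the critical section is a stand-in, not Nova's cache logic.
    from oslo_concurrency import lockutils

    cache_path = ('[datastore2] devstack-image-cache_base/'
                  '4d166298-c700-4bc6-8f8f-67684a277053')
    with lockutils.lock(cache_path):
        # check-then-populate the image cache; all workers using the
        # same name serialize here, producing acquire/release pairs
        # like the ones logged above
        pass
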
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.322749] env[61905]: DEBUG nova.network.neutron [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.389053] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31ad5f62-0a97-4bd1-84b0-ced3963ecc54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.398792] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527aefe4-2976-443d-a1f8-93e982c5b6de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.428822] env[61905]: DEBUG nova.compute.manager [req-ccc605b2-21b2-470d-ac26-3eeda01f36ad req-279d949c-f11d-4db1-a431-41b093819c98 service nova] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Detach interface failed, port_id=8ee1c399-6b35-4fa0-bdf9-4ad8f5d71f41, reason: Instance 12c21d8e-1941-4481-9216-015ba6c09b9b could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 920.489255] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.492568] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.327s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.494262] env[61905]: INFO nova.compute.claims [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.519024] env[61905]: INFO nova.scheduler.client.report [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Deleted allocations for instance ebf7849c-716f-4b4c-bb9c-42c090d0b3c0 [ 920.649203] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Creating linked-clone VM from snapshot {{(pid=61905) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 920.650960] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-70a7d32b-63b5-4d60-ba59-3edcdcbe2c58 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.661635] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 
tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 920.661635] env[61905]: value = "task-1362682" [ 920.661635] env[61905]: _type = "Task" [ 920.661635] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.670625] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362682, 'name': CloneVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.689250] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 920.698346] env[61905]: DEBUG nova.network.neutron [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updated VIF entry in instance network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 920.698730] env[61905]: DEBUG nova.network.neutron [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.711360] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 920.711684] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 920.711852] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.712057] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 920.712211] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.712359] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 920.712567] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 920.712728] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 920.712894] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 920.713067] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 920.713246] env[61905]: DEBUG nova.virt.hardware [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 920.714471] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990ebbb3-4979-4161-af98-cb5facc5ad25 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.724286] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a4f334-2394-427d-ade9-8201b3e8291d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.753864] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524c1a4a-2401-e4d4-1acf-20694f240eee, 'name': SearchDatastore_Task, 'duration_secs': 0.053772} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.754198] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.754428] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 920.754665] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.754815] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.754993] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 920.755601] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8f0cf3d-3204-4692-b100-669529334107 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
920.767161] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 920.767258] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 920.768758] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78ec10c7-1931-41e7-935a-8a1178a52ac4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.775033] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571818} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.775688] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 4b1723a2-94a2-4070-9b47-85c9c8169137/4b1723a2-94a2-4070-9b47-85c9c8169137.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 920.776025] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 920.776383] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4873beb-8800-4a57-b60c-7901d5760004 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.779858] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 920.779858] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a40fa-3886-657c-ceb2-ead71df684d6" [ 920.779858] env[61905]: _type = "Task" [ 920.779858] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.785173] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 920.785173] env[61905]: value = "task-1362683" [ 920.785173] env[61905]: _type = "Task" [ 920.785173] env[61905]: } to complete. 
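
Strings such as "[datastore2] devstack-image-cache_base/...vmdk" are datastore paths. Assuming the current oslo.vmware layout, the library ships a small helper for building and parsing them, so path arithmetic never needs manual string splicing:

    # Building/parsing '[datastore] dir/file.vmdk' paths, assuming
    # oslo.vmware's objects.datastore module layout.
    from oslo_vmware.objects.datastore import DatastorePath

    p = DatastorePath('datastore2', 'devstack-image-cache_base',
                      '4d166298-c700-4bc6-8f8f-67684a277053.vmdk')
    print(str(p))        # [datastore2] devstack-image-cache_base/...vmdk
    q = DatastorePath.parse(str(p))
    print(q.datastore)   # datastore2
    print(q.rel_path)    # devstack-image-cache_base/...vmdk
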
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.793089] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a40fa-3886-657c-ceb2-ead71df684d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.799061] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362683, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.825275] env[61905]: INFO nova.compute.manager [-] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Took 1.26 seconds to deallocate network for instance. [ 920.965042] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "7bb17b60-268a-4670-beb8-df5232a698ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.965327] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.030274] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6f558fc5-29f8-44e5-9e17-000ef839fb6f tempest-ServerShowV254Test-548778222 tempest-ServerShowV254Test-548778222-project-member] Lock "ebf7849c-716f-4b4c-bb9c-42c090d0b3c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 17.154s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.096813] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 921.098117] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a2169e-5881-4a5a-ab0d-44a2aaf617b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.104771] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk is in state: ready. 
{{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 921.104953] env[61905]: ERROR oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk due to incomplete transfer. [ 921.105262] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1c5f5fdd-c2a1-4225-b079-c649c983de31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.112214] env[61905]: DEBUG oslo_vmware.rw_handles [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ddd267-db46-039a-d969-dd6474d3f2aa/disk-0.vmdk. {{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 921.112396] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Uploaded image 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d to the Glance image server {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 921.115115] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Destroying the VM {{(pid=61905) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 921.115410] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c7347da4-c97e-4ecd-9ef0-b7821a47122d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.123977] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 921.123977] env[61905]: value = "task-1362684" [ 921.123977] env[61905]: _type = "Task" [ 921.123977] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.133114] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362684, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.171589] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362682, 'name': CloneVM_Task} progress is 94%. 
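
The ERROR above is the read-handle cleanup path: a streamOptimized download ended before every byte was transferred, so the handle checks the HttpNfcLease state and aborts the lease instead of completing it. Roughly, in terms of oslo.vmware primitives (the lease moref is a placeholder):

    # Sketch: abort an HttpNfcLease after an incomplete transfer.
    # 'session' as sketched earlier; 'lease' is a placeholder moref.
    from oslo_vmware import vim_util

    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        # transfer never finished; hand the lease back un-completed
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)
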
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.202039] env[61905]: DEBUG oslo_concurrency.lockutils [req-4c143a8a-df95-4354-a0b4-e7d7aa831a0e req-4a495947-4f90-4c39-905d-230d982c712e service nova] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.296844] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526a40fa-3886-657c-ceb2-ead71df684d6, 'name': SearchDatastore_Task, 'duration_secs': 0.0172} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.301277] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed7a9fb5-1174-4435-a3b8-61132435a208 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.306364] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362683, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109659} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.307017] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 921.307889] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdcd050-7428-4bef-9fef-17f90f508408 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.311650] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 921.311650] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b7beed-2292-b700-2c99-d16accadcd4b" [ 921.311650] env[61905]: _type = "Task" [ 921.311650] env[61905]: } to complete. 
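
"Extending root virtual disk to 1048576" is in KB: 1048576 KB is the 1 GiB root disk of this m1.nano flavor. It is one more VirtualDiskManager task, in the same style as the copy sketched earlier (path and datacenter ref are placeholders):

    # Sketch: grow a root disk to the flavor size (1 GiB = 1048576 KB).
    # 'session' and 'dc_ref' as earlier; the path is a placeholder.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore2] INSTANCE/INSTANCE.vmdk',
        datacenter=dc_ref,
        newCapacityKb=1048576,
        eagerZero=False)
    session.wait_for_task(task)
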
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.332858] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 4b1723a2-94a2-4070-9b47-85c9c8169137/4b1723a2-94a2-4070-9b47-85c9c8169137.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 921.334378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.334586] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18d9700e-9002-4dd8-a75c-a1dde47e95f3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.354398] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b7beed-2292-b700-2c99-d16accadcd4b, 'name': SearchDatastore_Task, 'duration_secs': 0.023252} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.355134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.355444] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 63eb2219-fea2-4af0-90d2-e8d9ac53a138/63eb2219-fea2-4af0-90d2-e8d9ac53a138.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 921.355721] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca17414a-e76d-4063-a4f0-9116faf7ebda {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.362111] env[61905]: DEBUG nova.compute.manager [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Stashing vm_state: active {{(pid=61905) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 921.367432] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 921.367432] env[61905]: value = "task-1362685" [ 921.367432] env[61905]: _type = "Task" [ 921.367432] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.371987] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 921.371987] env[61905]: value = "task-1362686" [ 921.371987] env[61905]: _type = "Task" [ 921.371987] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.380330] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362685, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.384859] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362686, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.455431] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Successfully updated port: ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.468524] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 921.635112] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362684, 'name': Destroy_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.672195] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362682, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.710260] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4326c690-6224-4a4c-b3e2-2e20cf9c151f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.718328] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef983b9-ae4a-414c-9008-7cc9864a3ece {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.753282] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d785541-405e-4a81-a2c7-7e3ca3a7773a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.761916] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c5fe8b-b411-49d3-b55d-3202af052cee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.778359] env[61905]: DEBUG nova.compute.provider_tree [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.879939] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.886724] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362685, 'name': ReconfigVM_Task, 'duration_secs': 0.422309} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.887012] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362686, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.887379] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 4b1723a2-94a2-4070-9b47-85c9c8169137/4b1723a2-94a2-4070-9b47-85c9c8169137.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.887994] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de808151-622e-4ca2-8b58-0dc0c5a53eb5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.894604] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 921.894604] env[61905]: value = "task-1362687" [ 921.894604] env[61905]: _type = "Task" [ 921.894604] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.910486] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362687, 'name': Rename_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.959272] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.959398] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.959530] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.970392] env[61905]: DEBUG nova.compute.manager [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Received event network-vif-plugged-ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.970392] env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Acquiring lock "55a9190b-52f7-4bba-81b0-079e62537183-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
921.970637] env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Lock "55a9190b-52f7-4bba-81b0-079e62537183-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.970785] env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Lock "55a9190b-52f7-4bba-81b0-079e62537183-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.970956] env[61905]: DEBUG nova.compute.manager [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] No waiting events found dispatching network-vif-plugged-ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 921.971535] env[61905]: WARNING nova.compute.manager [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Received unexpected event network-vif-plugged-ab6551f0-7329-4cd9-8d65-f6b7e18984ed for instance with vm_state building and task_state spawning. [ 921.971726] env[61905]: DEBUG nova.compute.manager [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Received event network-changed-ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 921.971891] env[61905]: DEBUG nova.compute.manager [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Refreshing instance network info cache due to event network-changed-ab6551f0-7329-4cd9-8d65-f6b7e18984ed. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 921.972076] env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Acquiring lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.991808] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.135093] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362684, 'name': Destroy_Task, 'duration_secs': 0.69918} completed successfully. 
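
The network-vif-plugged sequence above is Nova's external-event handshake: the compute manager keeps a per-instance registry of expected events, and Neutron's notification either completes a waiter or, as here, arrives before anyone registered and is logged as unexpected. A toy version of that registry (heavily simplified, not Nova's implementation):

    # Toy event registry mirroring pop_instance_event: waiters register
    # interest, notifications pop and signal them. Not Nova's code.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}           # (instance, event) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance, event):
            waiter = threading.Event()
            with self._lock:             # the '...-events' lock above
                self._waiters[(instance, event)] = waiter
            return waiter

        def pop_instance_event(self, instance, event):
            with self._lock:
                waiter = self._waiters.pop((instance, event), None)
            if waiter is None:           # nobody registered yet
                print('No waiting events found dispatching', event)
            else:
                waiter.set()

    events = InstanceEvents()
    events.pop_instance_event('55a9190b', 'network-vif-plugged')
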
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.135381] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Destroyed the VM [ 922.135642] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Deleting Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 922.135911] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-161f436b-56ce-4001-a89b-128444bf5a21 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.143215] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 922.143215] env[61905]: value = "task-1362688" [ 922.143215] env[61905]: _type = "Task" [ 922.143215] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.153032] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362688, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.174595] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362682, 'name': CloneVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.281804] env[61905]: DEBUG nova.scheduler.client.report [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.383506] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362686, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584266} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.383771] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 63eb2219-fea2-4af0-90d2-e8d9ac53a138/63eb2219-fea2-4af0-90d2-e8d9ac53a138.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 922.383984] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 922.384282] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c6f5d91-1682-45c4-aa44-129eb0369085 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.390425] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 922.390425] env[61905]: value = "task-1362689" [ 922.390425] env[61905]: _type = "Task" [ 922.390425] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.397982] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.405355] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362687, 'name': Rename_Task, 'duration_secs': 0.205627} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.405613] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 922.405877] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c139ce8f-cbce-451a-a067-2aa6934eb385 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.412244] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 922.412244] env[61905]: value = "task-1362690" [ 922.412244] env[61905]: _type = "Task" [ 922.412244] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.420168] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.492650] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.632869] env[61905]: DEBUG nova.network.neutron [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Updating instance_info_cache with network_info: [{"id": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "address": "fa:16:3e:36:7d:e1", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab6551f0-73", "ovs_interfaceid": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.653360] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362688, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.673226] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362682, 'name': CloneVM_Task, 'duration_secs': 1.584439} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.673494] env[61905]: INFO nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Created linked-clone VM from snapshot [ 922.674227] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dce779-de59-42cf-ad0f-c7f4d6291bb3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.681116] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Uploading image af6a522f-8c87-46b5-bf21-04939866f8ef {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 922.701947] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 922.701947] env[61905]: value = "vm-290071" [ 922.701947] env[61905]: _type = "VirtualMachine" [ 922.701947] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 922.702257] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-78fd103b-93e4-4323-a0f4-a9b8cc147258 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.709106] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lease: (returnval){ [ 922.709106] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b3c4be-6d38-f3e8-546e-47ac464df464" [ 922.709106] env[61905]: _type = "HttpNfcLease" [ 922.709106] env[61905]: } obtained for exporting VM: (result){ [ 922.709106] env[61905]: value = "vm-290071" [ 922.709106] env[61905]: _type = "VirtualMachine" [ 922.709106] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 922.709505] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the lease: (returnval){ [ 922.709505] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b3c4be-6d38-f3e8-546e-47ac464df464" [ 922.709505] env[61905]: _type = "HttpNfcLease" [ 922.709505] env[61905]: } to be ready. {{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 922.716462] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.716462] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b3c4be-6d38-f3e8-546e-47ac464df464" [ 922.716462] env[61905]: _type = "HttpNfcLease" [ 922.716462] env[61905]: } is initializing. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 922.787410] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.787985] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 922.790594] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.600s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.792143] env[61905]: INFO nova.compute.claims [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.900567] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063117} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.900857] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.901579] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8b331e-e03c-482e-9576-546412873f8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.924698] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 63eb2219-fea2-4af0-90d2-e8d9ac53a138/63eb2219-fea2-4af0-90d2-e8d9ac53a138.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.928050] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d750bfa9-0665-4fa1-92c2-7e9c851c4d3d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.951352] env[61905]: DEBUG oslo_vmware.api [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362690, 'name': PowerOnVM_Task, 'duration_secs': 0.461051} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.953186] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.953336] env[61905]: INFO nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Took 7.92 seconds to spawn the instance on the hypervisor. [ 922.953608] env[61905]: DEBUG nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 922.954016] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 922.954016] env[61905]: value = "task-1362692" [ 922.954016] env[61905]: _type = "Task" [ 922.954016] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.954807] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931ba840-b9e2-4fef-b8f7-26fc9fd60cd5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.965780] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362692, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.973877] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "26375621-b272-4243-95bd-5cf5b946cec4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.974626] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.136031] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.136637] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Instance network_info: |[{"id": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "address": "fa:16:3e:36:7d:e1", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab6551f0-73", "ovs_interfaceid": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 923.137096] 
env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Acquired lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.137345] env[61905]: DEBUG nova.network.neutron [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Refreshing network info cache for port ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 923.138602] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:7d:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab6551f0-7329-4cd9-8d65-f6b7e18984ed', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.146153] env[61905]: DEBUG oslo.service.loopingcall [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.147213] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.150539] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f664b2e-5355-4ebf-8073-f6404cfdf608 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.170427] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362688, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.171586] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.171586] env[61905]: value = "task-1362693" [ 923.171586] env[61905]: _type = "Task" [ 923.171586] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.179703] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362693, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.217277] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 923.217277] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b3c4be-6d38-f3e8-546e-47ac464df464" [ 923.217277] env[61905]: _type = "HttpNfcLease" [ 923.217277] env[61905]: } is ready. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 923.217716] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 923.217716] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b3c4be-6d38-f3e8-546e-47ac464df464" [ 923.217716] env[61905]: _type = "HttpNfcLease" [ 923.217716] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 923.218429] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ec4f7d-a4e4-4849-884d-4924cd509c94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.225633] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk from lease info. {{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 923.226859] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk for reading. {{(pid=61905) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 923.296619] env[61905]: DEBUG nova.compute.utils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 923.299899] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 923.300108] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 923.355809] env[61905]: DEBUG nova.policy [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cfd818932a44306bec0838cb58bf483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69666592007841459c3f8f9836ef4d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 923.411857] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7b2e08dc-5ffc-4c3e-b8e2-b19ca11729ef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.469713] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362692, 'name': ReconfigVM_Task, 'duration_secs': 0.282994} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.470020] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 63eb2219-fea2-4af0-90d2-e8d9ac53a138/63eb2219-fea2-4af0-90d2-e8d9ac53a138.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 923.470800] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7a51d7f-6f5c-4273-a102-5f6a0b72ba65 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.480632] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 923.482929] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 923.482929] env[61905]: value = "task-1362694" [ 923.482929] env[61905]: _type = "Task" [ 923.482929] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.488482] env[61905]: INFO nova.compute.manager [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Took 40.40 seconds to build instance. [ 923.494334] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362694, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.663231] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362688, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.667726] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Successfully created port: 50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.681478] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362693, 'name': CreateVM_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.800770] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 923.938849] env[61905]: DEBUG nova.network.neutron [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Updated VIF entry in instance network info cache for port ab6551f0-7329-4cd9-8d65-f6b7e18984ed. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 923.939934] env[61905]: DEBUG nova.network.neutron [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Updating instance_info_cache with network_info: [{"id": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "address": "fa:16:3e:36:7d:e1", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab6551f0-73", "ovs_interfaceid": "ab6551f0-7329-4cd9-8d65-f6b7e18984ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.994689] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c49c57af-fbe9-450e-8d71-ac80ef3b91e5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.910s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.002902] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362694, 'name': Rename_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.014022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.073016] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc50732-8c4e-4d8d-a2d6-eefdd95124fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.081859] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4580df-51c2-461c-a163-af7ade97cfad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.114559] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98dae33-46f6-49f5-b392-7848db709aab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.122821] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5b0dcb-26ca-4c72-aaee-b2c1f9f7fc7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.140066] env[61905]: DEBUG nova.compute.provider_tree [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.162413] env[61905]: DEBUG oslo_vmware.api [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362688, 'name': RemoveSnapshot_Task, 'duration_secs': 1.793673} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.162770] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Deleted Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 924.163092] env[61905]: INFO nova.compute.manager [None req-9933ffcc-c277-4684-86d4-eed0327cb387 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Took 18.59 seconds to snapshot the instance on the hypervisor. [ 924.182768] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362693, 'name': CreateVM_Task, 'duration_secs': 0.955579} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.182950] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.183822] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.184087] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.184552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 924.184816] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76473857-ed9d-4fa3-ad59-ad325f3a941c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.190036] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 924.190036] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f110cf-ca27-49a5-9fc8-65c0c1a4b8d9" [ 924.190036] env[61905]: _type = "Task" [ 924.190036] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.198908] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f110cf-ca27-49a5-9fc8-65c0c1a4b8d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.443875] env[61905]: DEBUG oslo_concurrency.lockutils [req-fde78854-2aca-4ac2-b20e-ad2768a4ee9b req-785e3fff-88df-4e0d-b988-280ee4641c02 service nova] Releasing lock "refresh_cache-55a9190b-52f7-4bba-81b0-079e62537183" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.494596] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362694, 'name': Rename_Task, 'duration_secs': 0.868004} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.495117] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 924.495411] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ecac08cc-0394-4176-8f88-0073559316d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.501551] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 924.501551] env[61905]: value = "task-1362695" [ 924.501551] env[61905]: _type = "Task" [ 924.501551] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.510897] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362695, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.645369] env[61905]: DEBUG nova.scheduler.client.report [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.701838] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f110cf-ca27-49a5-9fc8-65c0c1a4b8d9, 'name': SearchDatastore_Task, 'duration_secs': 0.013234} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.702173] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.702414] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.702652] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.702820] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.702984] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.703264] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31454c48-9dc1-4ef7-98a0-f051af5b1c66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.712989] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.714026] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.714199] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22292bbc-28d0-4900-98d3-8a8a105c8d99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.719803] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 924.719803] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bafa0a-db95-ef51-abba-8b4a6af61b7b" [ 924.719803] env[61905]: _type = "Task" [ 924.719803] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.727944] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bafa0a-db95-ef51-abba-8b4a6af61b7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.816557] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 924.838661] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.839063] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.839319] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.839627] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 
tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.839893] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.840181] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.840475] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.840730] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.840950] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.841187] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.841424] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.842472] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb09eea3-c596-4deb-a8b4-2238c18a1fbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.850973] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c1cbd9-acfc-43a8-bd2b-e6e991931883 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.016860] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362695, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.118185] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.118585] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.120387] env[61905]: INFO nova.compute.manager [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Shelving [ 925.149301] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.358s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.150154] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 925.154067] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.820s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.154359] env[61905]: DEBUG nova.objects.instance [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lazy-loading 'resources' on Instance uuid 12c21d8e-1941-4481-9216-015ba6c09b9b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.196393] env[61905]: DEBUG nova.compute.manager [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Received event network-vif-plugged-50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.196832] env[61905]: DEBUG oslo_concurrency.lockutils [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] Acquiring lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.197217] env[61905]: DEBUG oslo_concurrency.lockutils [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.197409] env[61905]: DEBUG oslo_concurrency.lockutils [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.197596] env[61905]: DEBUG nova.compute.manager [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] No waiting events found dispatching network-vif-plugged-50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 925.197770] env[61905]: WARNING nova.compute.manager [req-25bc3687-f395-4303-88d2-a0a9eac9bbdc req-93b12678-cf92-464d-97a6-bcecdd9e64bd service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Received unexpected event network-vif-plugged-50e51588-2e01-4760-abff-6c8ee440a693 for instance with vm_state building and task_state spawning. [ 925.231182] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bafa0a-db95-ef51-abba-8b4a6af61b7b, 'name': SearchDatastore_Task, 'duration_secs': 0.013786} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.232075] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f23f111e-db5a-432a-bf69-e68f51629a73 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.238020] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 925.238020] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522d5c81-2d62-05ae-3a6c-cded2f83f4da" [ 925.238020] env[61905]: _type = "Task" [ 925.238020] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.246767] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522d5c81-2d62-05ae-3a6c-cded2f83f4da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.257650] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Successfully updated port: 50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.514034] env[61905]: DEBUG oslo_vmware.api [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362695, 'name': PowerOnVM_Task, 'duration_secs': 0.518052} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.514433] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 925.514678] env[61905]: INFO nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Took 8.13 seconds to spawn the instance on the hypervisor. 
[ 925.514881] env[61905]: DEBUG nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 925.515770] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b084741-e625-45ee-9cc0-015852bf2a66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.630780] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.631071] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cb59205-1f85-4305-9ffd-8367c3c4202e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.639329] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 925.639329] env[61905]: value = "task-1362696" [ 925.639329] env[61905]: _type = "Task" [ 925.639329] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.647698] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.658068] env[61905]: DEBUG nova.compute.utils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.658887] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 925.659081] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.726988] env[61905]: DEBUG nova.policy [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cfd818932a44306bec0838cb58bf483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69666592007841459c3f8f9836ef4d7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 925.752366] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522d5c81-2d62-05ae-3a6c-cded2f83f4da, 'name': SearchDatastore_Task, 'duration_secs': 0.026072} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.752366] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.752366] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 55a9190b-52f7-4bba-81b0-079e62537183/55a9190b-52f7-4bba-81b0-079e62537183.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.752366] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad136c3e-2143-4e0f-8c56-7fb6a974d19a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.759194] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 925.759194] env[61905]: value = "task-1362697" [ 925.759194] env[61905]: _type = "Task" [ 925.759194] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.765327] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.765443] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.765610] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.772129] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362697, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.889947] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fb6ce2-5f38-4889-9d24-d2ec039ad8a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.901318] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc3a99a-d17e-4b5e-aa9e-bb3f24029892 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.934131] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09c25d5-7415-45cb-b603-916361c110f3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.942343] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cafd4ff-44d9-4406-a545-eeb267caf761 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.956347] env[61905]: DEBUG nova.compute.provider_tree [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.035706] env[61905]: INFO nova.compute.manager [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Took 38.06 seconds to build instance. 
[ 926.150416] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362696, 'name': PowerOffVM_Task, 'duration_secs': 0.218683} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.150717] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.151625] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c8c079-ecb3-4e8c-8971-d16ca8d22ce0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.172705] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 926.176323] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506de840-f2c9-42cd-91fe-40c1442c5744 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.274786] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362697, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.306372] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.459879] env[61905]: DEBUG nova.scheduler.client.report [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.476938] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Successfully created port: 71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.518322] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Updating instance_info_cache with network_info: [{"id": "50e51588-2e01-4760-abff-6c8ee440a693", "address": "fa:16:3e:a5:f5:17", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e51588-2e", "ovs_interfaceid": "50e51588-2e01-4760-abff-6c8ee440a693", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.538778] env[61905]: DEBUG oslo_concurrency.lockutils [None req-35b950d3-e958-44fe-a0f0-52e23b1ab414 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.586s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.688474] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Creating 
Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 926.688813] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cb1422f1-09cb-42ab-a140-8dbaba20ef8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.696747] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 926.696747] env[61905]: value = "task-1362698" [ 926.696747] env[61905]: _type = "Task" [ 926.696747] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.705712] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362698, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.769247] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640632} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.769586] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 55a9190b-52f7-4bba-81b0-079e62537183/55a9190b-52f7-4bba-81b0-079e62537183.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.769812] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.770093] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec5b4fcd-2109-41b0-851b-ff88fb6f9108 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.777281] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 926.777281] env[61905]: value = "task-1362699" [ 926.777281] env[61905]: _type = "Task" [ 926.777281] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.786322] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362699, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.964991] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.967799] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.088s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.007598] env[61905]: INFO nova.scheduler.client.report [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Deleted allocations for instance 12c21d8e-1941-4481-9216-015ba6c09b9b [ 927.023055] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.023055] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Instance network_info: |[{"id": "50e51588-2e01-4760-abff-6c8ee440a693", "address": "fa:16:3e:a5:f5:17", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e51588-2e", "ovs_interfaceid": "50e51588-2e01-4760-abff-6c8ee440a693", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 927.023055] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:f5:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50e51588-2e01-4760-abff-6c8ee440a693', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.031410] env[61905]: DEBUG oslo.service.loopingcall [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 927.032331] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.032331] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e901cc10-c5b3-482a-bdf4-ec3b11251359 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.056183] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.056183] env[61905]: value = "task-1362700" [ 927.056183] env[61905]: _type = "Task" [ 927.056183] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.064806] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362700, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.186242] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 927.210851] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362698, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.224887] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.225453] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.225897] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.226327] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.226701] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.227041] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.227472] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.227710] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 927.227949] env[61905]: DEBUG 
nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.228261] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.228518] env[61905]: DEBUG nova.virt.hardware [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.229700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74384a91-6141-439e-b5fe-7f917a261bee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.242331] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abb6c74-7ee1-43d5-a2d4-6d930990b414 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.249777] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Received event network-changed-50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.249970] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Refreshing instance network info cache due to event network-changed-50e51588-2e01-4760-abff-6c8ee440a693. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 927.250201] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquiring lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.250368] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquired lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.250533] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Refreshing network info cache for port 50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 927.288357] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362699, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145829} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.288793] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.289726] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f942b4-6877-45f2-9a42-8c896f38f32c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.318546] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 55a9190b-52f7-4bba-81b0-079e62537183/55a9190b-52f7-4bba-81b0-079e62537183.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.318961] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c793fffd-475c-4b0f-98d9-330fe84ec2f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.342154] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 927.342154] env[61905]: value = "task-1362701" [ 927.342154] env[61905]: _type = "Task" [ 927.342154] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.351165] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362701, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.473673] env[61905]: INFO nova.compute.claims [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.515146] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9beb421f-10b6-48f0-b264-62967676d419 tempest-ServerRescueTestJSON-1273903121 tempest-ServerRescueTestJSON-1273903121-project-member] Lock "12c21d8e-1941-4481-9216-015ba6c09b9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.579s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.567337] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362700, 'name': CreateVM_Task, 'duration_secs': 0.387662} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.567512] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.568297] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.568481] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.568845] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 927.569144] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-475a0718-cb27-459d-a229-535a7e4b5fe3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.574455] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 927.574455] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528eab71-7834-d56e-da18-3275939b0abc" [ 
927.574455] env[61905]: _type = "Task" [ 927.574455] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.582810] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528eab71-7834-d56e-da18-3275939b0abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.709168] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362698, 'name': CreateSnapshot_Task, 'duration_secs': 0.947468} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.709539] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Created Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 927.710424] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253b8f5e-6d5e-4f1e-8048-ea7b14627af3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.855727] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362701, 'name': ReconfigVM_Task, 'duration_secs': 0.476686} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.856150] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 55a9190b-52f7-4bba-81b0-079e62537183/55a9190b-52f7-4bba-81b0-079e62537183.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.857032] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d653b4af-c742-4231-8974-446e1415e66e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.863912] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 927.863912] env[61905]: value = "task-1362702" [ 927.863912] env[61905]: _type = "Task" [ 927.863912] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.875695] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362702, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.982190] env[61905]: INFO nova.compute.resource_tracker [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating resource usage from migration f394d276-5c21-48cd-8c02-d666e52f4cdd [ 928.091397] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528eab71-7834-d56e-da18-3275939b0abc, 'name': SearchDatastore_Task, 'duration_secs': 0.016573} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.092705] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Updated VIF entry in instance network info cache for port 50e51588-2e01-4760-abff-6c8ee440a693. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.092982] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Updating instance_info_cache with network_info: [{"id": "50e51588-2e01-4760-abff-6c8ee440a693", "address": "fa:16:3e:a5:f5:17", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e51588-2e", "ovs_interfaceid": "50e51588-2e01-4760-abff-6c8ee440a693", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.098327] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.098669] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.102485] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.102485] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.102485] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.102485] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f83a8cb-823c-463e-ab91-98a82e3fa7c0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.112233] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.112233] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.116707] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed51bd1e-7b08-4080-91a9-ebc794ec810f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.122573] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 928.122573] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525b53e7-babc-00cc-a51f-0001247c1828" [ 928.122573] env[61905]: _type = "Task" [ 928.122573] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.131614] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525b53e7-babc-00cc-a51f-0001247c1828, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.230973] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Creating linked-clone VM from snapshot {{(pid=61905) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 928.231699] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-00f4a0bc-ce49-4768-a56e-ac52aef8e7c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.235966] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1116f5e1-9a6f-4116-98d8-704be09b5450 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.245385] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0f5334-45e6-4672-8bbd-d88c9dcea11e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.249243] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 928.249243] env[61905]: value = "task-1362703" [ 928.249243] env[61905]: _type = "Task" [ 928.249243] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.281674] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3b5c32-8167-4516-94b4-f6219b2285fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.287365] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362703, 'name': CloneVM_Task} progress is 12%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.293847] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232fbb76-75c1-4897-a131-54e09f0ac7bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.311563] env[61905]: DEBUG nova.compute.provider_tree [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.377349] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362702, 'name': Rename_Task, 'duration_secs': 0.194778} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.377734] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.378104] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eab660f3-8953-413e-b9cf-010316a9cf14 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.385048] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 928.385048] env[61905]: value = "task-1362704" [ 928.385048] env[61905]: _type = "Task" [ 928.385048] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.395392] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362704, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.534906] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Successfully updated port: 71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.601227] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Releasing lock "refresh_cache-4eba4203-0e35-4c56-b24f-3ac47a7a8b83" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.601911] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.602106] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 928.602382] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.602531] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.602698] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.638834] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525b53e7-babc-00cc-a51f-0001247c1828, 'name': SearchDatastore_Task, 'duration_secs': 0.015706} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.641718] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84fc5943-2da2-426d-a24f-433820a31fe2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.648759] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 928.648759] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c83850-8876-70e5-d8da-763b1ed8d706" [ 928.648759] env[61905]: _type = "Task" [ 928.648759] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.657553] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c83850-8876-70e5-d8da-763b1ed8d706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.761123] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362703, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.814598] env[61905]: DEBUG nova.scheduler.client.report [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.896113] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362704, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.043223] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.043223] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.043223] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.161029] env[61905]: DEBUG nova.compute.manager [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.161029] env[61905]: DEBUG nova.compute.manager [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing instance network info cache due to event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0. 
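The inventory comparison above is the scheduler report client deciding whether the compute node's inventory must be pushed to Placement. The schedulable capacity implied by each record follows the standard Placement formula (total - reserved) * allocation_ratio; a worked check against the values logged above:

    # Capacity implied by the inventory record logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, rec in inventory.items():
        cap = (rec['total'] - rec['reserved']) * rec['allocation_ratio']
        print(rc, cap)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0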
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.161235] env[61905]: DEBUG oslo_concurrency.lockutils [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.161439] env[61905]: DEBUG oslo_concurrency.lockutils [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.161608] env[61905]: DEBUG nova.network.neutron [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.170723] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c83850-8876-70e5-d8da-763b1ed8d706, 'name': SearchDatastore_Task, 'duration_secs': 0.017334} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.171525] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.172238] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83/4eba4203-0e35-4c56-b24f-3ac47a7a8b83.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.172238] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5f48c16-2771-4ad8-8ce4-b5f7441ce14b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.181040] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 929.181040] env[61905]: value = "task-1362705" [ 929.181040] env[61905]: _type = "Task" [ 929.181040] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.189621] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362705, 'name': CopyVirtualDisk_Task} progress is 0%. 
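The CopyVirtualDisk_Task above clones the cached image VMDK into the instance's datastore directory. A sketch of that call through oslo.vmware, assuming an already-created session and a datacenter reference dc_ref; the datastore paths are the ones from the log:

    def copy_cached_image(session, dc_ref):
        # Copy the image-cache VMDK to the instance directory, then wait on
        # the returned task, as the entries above do.
        src = ('[datastore2] devstack-image-cache_base/'
               '4d166298-c700-4bc6-8f8f-67684a277053/'
               '4d166298-c700-4bc6-8f8f-67684a277053.vmdk')
        dst = ('[datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83/'
               '4eba4203-0e35-4c56-b24f-3ac47a7a8b83.vmdk')
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        session.wait_for_task(task)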
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.263717] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362703, 'name': CloneVM_Task} progress is 94%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.275724] env[61905]: DEBUG nova.compute.manager [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Received event network-vif-plugged-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.276550] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Acquiring lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.276550] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.276550] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.276550] env[61905]: DEBUG nova.compute.manager [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] No waiting events found dispatching network-vif-plugged-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.276800] env[61905]: WARNING nova.compute.manager [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Received unexpected event network-vif-plugged-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff for instance with vm_state building and task_state spawning. [ 929.276853] env[61905]: DEBUG nova.compute.manager [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Received event network-changed-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.277059] env[61905]: DEBUG nova.compute.manager [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Refreshing instance network info cache due to event network-changed-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff.
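The sequence above is the external-event handshake: Neutron reports network-vif-plugged for a port, the compute manager looks for a waiter registered by the spawning thread, finds none ("No waiting events found"), and logs the WARNING because the instance is still building. A much-simplified sketch of that register/pop flow (nova's real InstanceEvents machinery is more involved; this only illustrates the idea):

    import threading

    _waiters = {}          # event name -> threading.Event
    _waiters_lock = threading.Lock()

    def prepare_for_event(name):
        # Called by the spawning thread before plugging VIFs.
        with _waiters_lock:
            return _waiters.setdefault(name, threading.Event())

    def pop_event(name):
        # Called when Neutron delivers the external event.
        with _waiters_lock:
            ev = _waiters.pop(name, None)
        if ev is None:
            print('Received unexpected event %s' % name)  # cf. the WARNING above
        else:
            ev.set()

    # Spawn side: ev = prepare_for_event('network-vif-plugged-<port-id>'),
    # then ev.wait(timeout=...); event side: pop_event(...) on notification.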
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.277710] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Acquiring lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.319783] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.352s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.319995] env[61905]: INFO nova.compute.manager [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Migrating [ 929.320247] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.320398] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.324728] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.331s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.325138] env[61905]: INFO nova.compute.claims [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.327845] env[61905]: INFO nova.compute.rpcapi [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 929.328331] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.396416] env[61905]: DEBUG oslo_vmware.api [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362704, 'name': PowerOnVM_Task, 'duration_secs': 0.52068} completed successfully. 
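"Automatically selected compute RPC version 6.3 from minimum service version 67" is nova capping the compute RPC API at whatever the oldest nova-compute service in the deployment can speak. The table below is an abbreviated, hypothetical stand-in for the real history mapping in nova/compute/rpcapi.py, shown only to illustrate the lookup:

    # Hypothetical, abbreviated service-version -> RPC-version table.
    SERVICE_TO_RPC = {61: '6.0', 64: '6.1', 66: '6.2', 67: '6.3'}

    def pick_rpc_version(min_service_version):
        eligible = [v for v in SERVICE_TO_RPC if v <= min_service_version]
        if not eligible:
            raise ValueError('service version too old')
        return SERVICE_TO_RPC[max(eligible)]

    assert pick_rpc_version(67) == '6.3'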
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.396815] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.397048] env[61905]: INFO nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Took 8.71 seconds to spawn the instance on the hypervisor. [ 929.397278] env[61905]: DEBUG nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 929.398138] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98229397-d910-42b8-bc99-e1106de166f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.515418] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.515813] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.576981] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 
e50cac29-797e-44a2-aafc-868e45ffd9cc] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.699315] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362705, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.763223] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362703, 'name': CloneVM_Task} progress is 95%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.845837] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.846116] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.846357] env[61905]: DEBUG nova.network.neutron [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.924560] env[61905]: INFO nova.compute.manager [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Took 22.82 seconds to build instance. 
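The instance_info_cache payloads logged above and below are plain JSON-shaped VIF dicts. A small sketch of pulling the fixed and floating addresses out of one such entry (the shape is taken from the cached entries in this log):

    def vif_addresses(vif):
        # vif is one element of the network_info list, e.g. the
        # port 7c93f7f3-... entry cached above.
        fixed, floating = [], []
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return fixed, floating

    # For the 7c93f7f3-... entry above this yields
    # (['192.168.128.9'], ['10.180.180.237']).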
[ 929.965169] env[61905]: DEBUG nova.network.neutron [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Updating instance_info_cache with network_info: [{"id": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "address": "fa:16:3e:1a:e1:ac", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e6216d-55", "ovs_interfaceid": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.971129] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.971256] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.972861] env[61905]: DEBUG nova.objects.instance [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'flavor' on Instance uuid 0f7ccb34-cb14-4b21-ae61-b066427d400e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.975523] env[61905]: DEBUG nova.network.neutron [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updated VIF entry in instance network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0.
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.975886] env[61905]: DEBUG nova.network.neutron [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.018727] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.019094] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.019330] env[61905]: DEBUG nova.compute.manager [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing instance network info cache due to event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 930.019605] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.193059] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683604} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.193374] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83/4eba4203-0e35-4c56-b24f-3ac47a7a8b83.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.193582] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.193870] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3fbeaef-4450-490c-9fc5-57d60ad8b164 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.202449] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 930.202449] env[61905]: value = "task-1362706" [ 930.202449] env[61905]: _type = "Task" [ 930.202449] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.208850] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.261681] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362703, 'name': CloneVM_Task, 'duration_secs': 1.80055} completed successfully. 
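"Extending root virtual disk to 1048576" above looks odd until the unit is known: the vmwareapi driver sizes VMDKs in KiB, so 1048576 KiB is a 1 GiB root disk. A one-line check of the assumed conversion from the flavor's root_gb:

    # Assumed conversion: flavor root_gb -> VMDK size in KiB, the unit the
    # "Extending root virtual disk to 1048576" entry appears to be using.
    root_gb = 1
    size_kb = root_gb * 1024 * 1024
    assert size_kb == 1048576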
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.262063] env[61905]: INFO nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Created linked-clone VM from snapshot [ 930.263013] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac87bec-4524-4e1d-a77b-f8cdb36f93a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.273997] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Uploading image 87d546bb-0571-4919-bdca-29528219287a {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 930.305341] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 930.305341] env[61905]: value = "vm-290075" [ 930.305341] env[61905]: _type = "VirtualMachine" [ 930.305341] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 930.305661] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-18aefec7-e0c5-40d4-889b-0db93289633f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.312932] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lease: (returnval){ [ 930.312932] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524936ff-af89-faf2-3440-b560d6326073" [ 930.312932] env[61905]: _type = "HttpNfcLease" [ 930.312932] env[61905]: } obtained for exporting VM: (result){ [ 930.312932] env[61905]: value = "vm-290075" [ 930.312932] env[61905]: _type = "VirtualMachine" [ 930.312932] env[61905]: }. {{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 930.313355] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the lease: (returnval){ [ 930.313355] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524936ff-af89-faf2-3440-b560d6326073" [ 930.313355] env[61905]: _type = "HttpNfcLease" [ 930.313355] env[61905]: } to be ready. {{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 930.320336] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.320336] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524936ff-af89-faf2-3440-b560d6326073" [ 930.320336] env[61905]: _type = "HttpNfcLease" [ 930.320336] env[61905]: } is initializing. 
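Exporting the linked clone for image upload goes through an HttpNfcLease: the lease is requested via ExportVm, polled while it reports "initializing", and used once "ready". A sketch of that poll loop, assuming the oslo.vmware session from earlier; lease_ref stands for whatever ExportVm returned:

    import time

    from oslo_vmware import vim_util

    def wait_for_lease_ready(session, lease_ref, poll=0.5, timeout=60.0):
        # Poll HttpNfcLease.state until it leaves 'initializing', mirroring
        # the "is initializing" / "is ready" lines in this log.
        deadline = time.time() + timeout
        while time.time() < deadline:
            state = session.invoke_api(vim_util, 'get_object_property',
                                       session.vim, lease_ref, 'state')
            if state == 'ready':
                return
            if state == 'error':
                raise RuntimeError('HttpNfcLease entered error state')
            time.sleep(poll)
        raise TimeoutError('HttpNfcLease never became ready')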
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.427668] env[61905]: DEBUG oslo_concurrency.lockutils [None req-856b3949-13f6-4f11-bcbd-3e821066f3bf tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.329s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.471726] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.471726] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Instance network_info: |[{"id": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "address": "fa:16:3e:1a:e1:ac", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e6216d-55", "ovs_interfaceid": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 930.471726] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Acquired lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.472059] env[61905]: DEBUG nova.network.neutron [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Refreshing network info cache for port 71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.473215] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:e1:ac', 'network_ref': {'type': 'OpaqueNetwork',
'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71e6216d-55ff-4eaa-a053-1d3da0bbd7ff', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.481667] env[61905]: DEBUG oslo.service.loopingcall [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.485644] env[61905]: DEBUG oslo_concurrency.lockutils [req-a92b60a7-6dfc-4c8a-99e8-a65811360208 req-4e060d8b-8699-4d70-ba5e-d717bc159420 service nova] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.485983] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.488672] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.488988] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.489912] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6aa76ae5-c8d9-423b-9b0a-a806d9853c78 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.514383] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.514383] env[61905]: value = "task-1362708" [ 930.514383] env[61905]: _type = "Task" [ 930.514383] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.524867] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362708, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.559849] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f93bd77-b04a-4294-8b2c-47bc9deb1bea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.571649] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82af6be-c433-4c6c-a932-abb5a2571f47 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.605443] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0e24f2-68f5-489b-aaa8-3f7fd8491bb9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.613028] env[61905]: DEBUG nova.objects.instance [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'pci_requests' on Instance uuid 0f7ccb34-cb14-4b21-ae61-b066427d400e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.617021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f786cde5-e30a-4ba8-bbf4-45c80228e279 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.633195] env[61905]: DEBUG nova.compute.provider_tree [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.711402] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128223} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.711699] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.712624] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a400d99d-bf7a-4141-8f79-569f35c03a1a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.735404] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83/4eba4203-0e35-4c56-b24f-3ac47a7a8b83.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.739788] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d18050-e423-4089-910b-c32bcd345859 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.760934] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 930.760934] env[61905]: value = "task-1362709" [ 930.760934] env[61905]: _type = "Task" [ 930.760934] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.770873] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362709, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.821165] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.821165] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524936ff-af89-faf2-3440-b560d6326073" [ 930.821165] env[61905]: _type = "HttpNfcLease" [ 930.821165] env[61905]: } is ready. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 930.821335] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 930.821335] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]524936ff-af89-faf2-3440-b560d6326073" [ 930.821335] env[61905]: _type = "HttpNfcLease" [ 930.821335] env[61905]: }. 
{{(pid=61905) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 930.822081] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfe9fc0-c3f8-4961-bf23-e79fcc7ac0b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.829863] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk from lease info. {{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 930.830073] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk for reading. {{(pid=61905) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 930.886945] env[61905]: DEBUG nova.network.neutron [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [{"id": "3802415e-d978-40f5-8265-2e03cbdd0814", "address": "fa:16:3e:3c:3e:e6", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3802415e-d9", "ovs_interfaceid": "3802415e-d978-40f5-8265-2e03cbdd0814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.941868] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c40a3c4d-04fd-4220-9bb7-a65c902c9f56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.951726] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updated VIF entry in instance network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0. 
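Once the lease is ready, the export handle resolves a per-host NFC URL (the esx7c1n3.../disk-0.vmdk URL above) and streams the disk over HTTPS. A rough sketch of such a streamed read with requests; this is illustrative only, since oslo.vmware's rw_handles additionally does chunked transfer, HttpNfcLeaseProgress keep-alives, and proper TLS handling:

    import requests

    def stream_vmdk(url, out_path, chunk=64 * 1024):
        # Illustrative streamed download of an NFC-exported disk; the real
        # rw_handles read handle also sends periodic lease progress updates.
        with requests.get(url, stream=True, verify=False) as resp:
            resp.raise_for_status()
            with open(out_path, 'wb') as f:
                for block in resp.iter_content(chunk):
                    f.write(block)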
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.952102] env[61905]: DEBUG nova.network.neutron [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.025840] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362708, 'name': CreateVM_Task, 'duration_secs': 0.503684} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.026088] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.027036] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.027251] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.027957] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.028276] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b491888-3cb9-4d7c-b37b-e89e3264636d {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.033228] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 931.033228] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520d77dd-dd39-be02-b63d-26728b79bae7" [ 931.033228] env[61905]: _type = "Task" [ 931.033228] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.041608] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520d77dd-dd39-be02-b63d-26728b79bae7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.122310] env[61905]: DEBUG nova.objects.base [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Object Instance<0f7ccb34-cb14-4b21-ae61-b066427d400e> lazy-loaded attributes: flavor,pci_requests {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 931.122541] env[61905]: DEBUG nova.network.neutron [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 931.136069] env[61905]: DEBUG nova.scheduler.client.report [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.268181] env[61905]: DEBUG nova.policy [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 931.278876] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362709, 'name': ReconfigVM_Task} progress is 99%. 
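The "Policy check for network:attach_external_network failed" entry above is oslo.policy denying a non-admin tempest user permission to attach an external network, so the interface attach proceeds only for tenant networks. A minimal sketch of that kind of check; the rule registration below is illustrative, not nova's actual policy defaults:

    from oslo_config import cfg
    from oslo_policy import policy

    # Illustrative enforcer with a single admin-only rule, mimicking the
    # network:attach_external_network check that failed above.
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5'}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    print(allowed)  # False for a member-only token, as in the log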
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.323639] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "30862de5-1cfa-494a-a81d-1215a3580339" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.323917] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.326424] env[61905]: DEBUG nova.compute.manager [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.326624] env[61905]: DEBUG nova.compute.manager [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 931.326974] env[61905]: DEBUG oslo_concurrency.lockutils [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.327140] env[61905]: DEBUG oslo_concurrency.lockutils [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.327308] env[61905]: DEBUG nova.network.neutron [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.386867] env[61905]: DEBUG nova.network.neutron [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Updated VIF entry in instance network info cache for port 71e6216d-55ff-4eaa-a053-1d3da0bbd7ff.
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.387313] env[61905]: DEBUG nova.network.neutron [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Updating instance_info_cache with network_info: [{"id": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "address": "fa:16:3e:1a:e1:ac", "network": {"id": "0a51cc3d-58ec-4f1a-b20b-9de46f959383", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1671335526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69666592007841459c3f8f9836ef4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71e6216d-55", "ovs_interfaceid": "71e6216d-55ff-4eaa-a053-1d3da0bbd7ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.394722] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.456108] env[61905]: DEBUG oslo_concurrency.lockutils [req-2c35f399-5691-48cb-b3cb-96a58b1de13a req-f7a6513c-ab81-4e8e-b1dc-80f6b1f18d32 service nova] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.547434] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520d77dd-dd39-be02-b63d-26728b79bae7, 'name': SearchDatastore_Task, 'duration_secs': 0.014382} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.548041] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.548465] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.548857] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.549102] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.549441] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.549835] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f60354db-38df-4641-a892-ec8dcd64adb9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.560825] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.561171] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.561975] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c8f896b-cfbf-45e5-8581-c5b335b315af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.567690] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 931.567690] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bfd4df-f2ef-764f-1228-24473bdfa2e2" [ 931.567690] env[61905]: _type = "Task" [ 931.567690] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.575841] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bfd4df-f2ef-764f-1228-24473bdfa2e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.641627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.642553] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 931.645744] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.632s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.647671] env[61905]: INFO nova.compute.claims [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.772924] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362709, 'name': ReconfigVM_Task, 'duration_secs': 0.526434} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.773336] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83/4eba4203-0e35-4c56-b24f-3ac47a7a8b83.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.774043] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70840e98-ca62-46ba-ab92-54185cf1ed93 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.781042] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 931.781042] env[61905]: value = "task-1362710" [ 931.781042] env[61905]: _type = "Task" [ 931.781042] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.789837] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362710, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.831025] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 931.890553] env[61905]: DEBUG oslo_concurrency.lockutils [req-74fe90c0-b7cf-44e9-997d-38b770399541 req-a9f35065-884a-47c5-b1c2-fdeff51cc0f7 service nova] Releasing lock "refresh_cache-e50cac29-797e-44a2-aafc-868e45ffd9cc" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.080636] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bfd4df-f2ef-764f-1228-24473bdfa2e2, 'name': SearchDatastore_Task, 'duration_secs': 0.013503} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.081933] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62487ca2-eac2-4170-89cc-0d288f4c48ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.090328] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 932.090328] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6401c-9804-45af-b2dc-ad1337d92856" [ 932.090328] env[61905]: _type = "Task" [ 932.090328] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.099009] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6401c-9804-45af-b2dc-ad1337d92856, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.154634] env[61905]: DEBUG nova.compute.utils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.161248] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Not allocating networking since 'none' was specified. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 932.221156] env[61905]: DEBUG nova.network.neutron [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.221371] env[61905]: DEBUG nova.network.neutron [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.292560] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362710, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.356205] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.519206] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 932.520318] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a7de1b-1d73-4d04-9977-b63cdb6f148b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.527462] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk is in state: ready. 
{{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 932.527630] env[61905]: ERROR oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk due to incomplete transfer. [ 932.527909] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1c1af82c-d022-48dd-a905-668265bf4b1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.536323] env[61905]: DEBUG oslo_vmware.rw_handles [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523f911b-be44-557a-6f3f-830f7b749bc7/disk-0.vmdk. {{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 932.536535] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Uploaded image af6a522f-8c87-46b5-bf21-04939866f8ef to the Glance image server {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 932.538885] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Destroying the VM {{(pid=61905) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 932.539170] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e78c83d3-9ff9-408c-81c3-529e94eb8d53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.546079] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 932.546079] env[61905]: value = "task-1362711" [ 932.546079] env[61905]: _type = "Task" [ 932.546079] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.556995] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362711, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.601399] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6401c-9804-45af-b2dc-ad1337d92856, 'name': SearchDatastore_Task, 'duration_secs': 0.017532} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.601677] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.601942] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e50cac29-797e-44a2-aafc-868e45ffd9cc/e50cac29-797e-44a2-aafc-868e45ffd9cc.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 932.602248] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc240315-5692-4a5e-a434-3f1b517863cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.608905] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 932.608905] env[61905]: value = "task-1362712" [ 932.608905] env[61905]: _type = "Task" [ 932.608905] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.618517] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.659623] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 932.727840] env[61905]: DEBUG oslo_concurrency.lockutils [req-2cb6830b-e37c-4c95-9e40-c7d8fe13f596 req-291946c6-a547-4034-93a7-e842cde1d339 service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.796766] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362710, 'name': Rename_Task, 'duration_secs': 0.67395} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.799735] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.800316] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-226a9517-9a82-4cfa-abcf-9f2d1c159024 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.807238] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 932.807238] env[61905]: value = "task-1362713" [ 932.807238] env[61905]: _type = "Task" [ 932.807238] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.818327] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.866040] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6573bfca-d38c-4c04-9314-cf23d1078d16 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.874610] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2510da9-03cb-4a42-a685-8579c953526d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.908306] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05367d86-c10f-41e9-bb7f-ae39794fa39c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.911926] env[61905]: DEBUG nova.network.neutron [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Successfully updated port: 1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.917857] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d855535-efab-4852-9819-b14d1ef22a93 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.924969] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21ff3ac-5da5-4816-91bb-33f71b2721c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.942829] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 
b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 0 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 932.957223] env[61905]: DEBUG nova.compute.provider_tree [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.055840] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362711, 'name': Destroy_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.119236] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362712, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.319341] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362713, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.417898] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.417898] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.417898] env[61905]: DEBUG nova.network.neutron [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.448902] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.449492] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67e20096-d734-452c-90be-0dbddb8aba6f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.456532] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 933.456532] env[61905]: value = "task-1362714" [ 933.456532] env[61905]: _type = "Task" [ 933.456532] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.461220] env[61905]: DEBUG nova.scheduler.client.report [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.469010] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362714, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.557206] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362711, 'name': Destroy_Task, 'duration_secs': 0.655631} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.557571] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Destroyed the VM [ 933.557930] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleting Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 933.558564] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a59b240e-f6e6-45bc-bfdf-964bfa33c36c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.564843] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 933.564843] env[61905]: value = "task-1362715" [ 933.564843] env[61905]: _type = "Task" [ 933.564843] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.573158] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362715, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.621157] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728455} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.621414] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e50cac29-797e-44a2-aafc-868e45ffd9cc/e50cac29-797e-44a2-aafc-868e45ffd9cc.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.621624] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.621888] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37937351-9369-4854-a0c6-6dc3d2f7bf75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.629070] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 933.629070] env[61905]: value = "task-1362716" [ 933.629070] env[61905]: _type = "Task" [ 933.629070] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.637585] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362716, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.648064] env[61905]: DEBUG nova.compute.manager [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.648064] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.648064] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.648064] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.648064] env[61905]: DEBUG nova.compute.manager [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] No waiting events found dispatching network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 933.648769] env[61905]: WARNING nova.compute.manager [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received unexpected event network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b for instance with vm_state active and task_state None. [ 933.648769] env[61905]: DEBUG nova.compute.manager [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.648769] env[61905]: DEBUG nova.compute.manager [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-1a2f5768-0301-4ff2-a1ae-e02fe03be64b. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 933.648769] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.671655] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 933.696998] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 933.697337] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 933.697526] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.697764] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 933.697962] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.698137] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 933.698365] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 
tempest-ServerShowV257Test-1075131130-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 933.698569] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 933.698825] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 933.699076] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 933.699306] env[61905]: DEBUG nova.virt.hardware [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 933.700345] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd79291c-ac3c-41a8-b395-9d9094833c9c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.708707] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45794a9c-0000-4c19-979a-9591d0d76739 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.723048] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.728874] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Creating folder: Project (8b2f2a5b486f4b4daff8854d4bfea87d). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.729204] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7872fb83-fbfc-4421-a591-607356cda0d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.740966] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Created folder: Project (8b2f2a5b486f4b4daff8854d4bfea87d) in parent group-v289968. 
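The nova.virt.hardware entries above trace CPU-topology selection for the 1-vCPU m1.nano flavor: preferred and maximum VirtCPUTopology values are derived from flavor and image limits, the possible sockets:cores:threads factorizations of the vCPU count are enumerated, and the candidates are sorted by preference; for one vCPU the only candidate is 1:1:1, hence "Got 1 possible topologies". A minimal sketch of that enumeration step, with simplified inputs (an illustration of the logged behavior, not Nova's actual implementation):

    from itertools import product
    from typing import NamedTuple

    class VirtCPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, maximum):
        """Enumerate sockets*cores*threads factorizations of vcpus that
        respect the per-dimension maxima (cf. the 65536 limits logged above)."""
        candidates = []
        for s, c, t in product(range(1, vcpus + 1), repeat=3):
            if (s * c * t == vcpus and s <= maximum.sockets
                    and c <= maximum.cores and t <= maximum.threads):
                candidates.append(VirtCPUTopology(s, c, t))
        return candidates

    # For the 1-vCPU flavor in the trace the only candidate is 1:1:1,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1, VirtCPUTopology(65536, 65536, 65536)))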
[ 933.741080] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Creating folder: Instances. Parent ref: group-v290077. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.741338] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4834db6-10f1-49fc-9c14-c9a29ff90aae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.751597] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Created folder: Instances in parent group-v290077. [ 933.751967] env[61905]: DEBUG oslo.service.loopingcall [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.752236] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.752499] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8c709a1-8f59-43b6-815d-03bfed3c06fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.770461] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.770461] env[61905]: value = "task-1362719" [ 933.770461] env[61905]: _type = "Task" [ 933.770461] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.781673] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362719, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.818883] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362713, 'name': PowerOnVM_Task, 'duration_secs': 0.955294} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.819258] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.819506] env[61905]: INFO nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Took 9.00 seconds to spawn the instance on the hypervisor. 
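The repeated "Waiting for the task ... progress is N% ... completed successfully" entries around CreateVM_Task and PowerOnVM_Task come from oslo.vmware's wait_for_task/_poll_task, which re-reads a vSphere task's info until it reaches a terminal state. A minimal polling loop in the same spirit; fetch_task_info is a hypothetical helper standing in for the session's PropertyCollector read, and the real code drives this via oslo.service loopingcall rather than a bare while loop:

    import time

    POLL_INTERVAL = 0.5  # example value; the real interval is configurable

    def wait_for_task(fetch_task_info, task_ref):
        """Poll a vSphere task until it succeeds or errors.

        fetch_task_info(task_ref) is assumed to return an object with
        .state ('queued' | 'running' | 'success' | 'error'), .progress
        and .error, mirroring the task's 'info' property.
        """
        while True:
            info = fetch_task_info(task_ref)
            if info.state == 'success':
                return info                      # caller sees duration, result
            if info.state == 'error':
                raise RuntimeError(info.error)   # surfaced as a task fault
            # 'queued'/'running': log progress and retry, as _poll_task does
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)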
[ 933.819759] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 933.820975] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54dc73e-24fd-4ab5-98ec-b2fe60f7d797 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.962475] env[61905]: WARNING nova.network.neutron [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] dd057eb4-847f-4d06-88bd-a25b4fd8db1f already exists in list: networks containing: ['dd057eb4-847f-4d06-88bd-a25b4fd8db1f']. ignoring it [ 933.968394] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.968953] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 933.971623] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362714, 'name': PowerOffVM_Task, 'duration_secs': 0.2598} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.972393] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.616s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.973539] env[61905]: INFO nova.compute.claims [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.975846] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.976011] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 17 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 934.075739] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362715, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.138792] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08169} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.141359] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.142250] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6751e3b-1416-4b38-8819-5ef29c0520f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.166785] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] e50cac29-797e-44a2-aafc-868e45ffd9cc/e50cac29-797e-44a2-aafc-868e45ffd9cc.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.169927] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecfe0347-98e7-4150-a55f-434562bc48a4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.191799] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 934.191799] env[61905]: value = "task-1362720" [ 934.191799] env[61905]: _type = "Task" [ 934.191799] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.202087] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362720, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.280671] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362719, 'name': CreateVM_Task, 'duration_secs': 0.343399} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.280869] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 934.281322] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.281487] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.281850] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.282110] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e263af2b-dded-49b0-99c7-294c7f9c1062 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.287167] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 934.287167] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52980075-c2fc-f5b7-f8e6-073873920850" [ 934.287167] env[61905]: _type = "Task" [ 934.287167] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.295251] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52980075-c2fc-f5b7-f8e6-073873920850, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.338790] env[61905]: INFO nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Took 21.19 seconds to build instance. 
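The MultipleCreateTestJSON and ServerShowV257Test entries above show the image-cache pattern behind these lock messages: a lock named after the cached image path ("[datastore1] devstack-image-cache_base/4d166298-...") is taken, a SearchDatastore_Task checks whether the cached VMDK exists, the disk is copied into the instance directory, and the lock is released so concurrent builds from the same image serialize. A sketch of that serialization under stated assumptions; the helpers passed in (datastore_exists, copy_virtual_disk, fetch_from_glance) are hypothetical stand-ins for the vSphere task calls, and the in-process Lock stands in for oslo_concurrency.lockutils:

    import threading
    from collections import defaultdict

    # One lock per cache path, keyed like
    # "[datastore2] devstack-image-cache_base/<image-id>".
    _cache_locks = defaultdict(threading.Lock)

    def fetch_image_if_missing(cache_path, instance_path,
                               datastore_exists, copy_virtual_disk,
                               fetch_from_glance):
        """Serialize same-image builds and populate the cache once.

        datastore_exists(path) -> bool   (cf. SearchDatastore_Task)
        fetch_from_glance(dst)           (download into the cache)
        copy_virtual_disk(src, dst)      (cf. CopyVirtualDisk_Task)
        """
        with _cache_locks[cache_path]:            # "Acquiring lock ..."
            if not datastore_exists(cache_path):  # cache miss: fetch once
                fetch_from_glance(cache_path)
            # cache hit or freshly fetched: copy into the instance dir
            copy_virtual_disk(cache_path, instance_path)
        # lock released here -> "Releasing lock ..." in the trace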
[ 934.341656] env[61905]: DEBUG nova.network.neutron [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "address": "fa:16:3e:be:37:76", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a2f5768-03", "ovs_interfaceid": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 934.475053] env[61905]: DEBUG nova.compute.utils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 934.476216] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 934.476478] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}}
[ 934.483799] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:12:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 934.484047] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 934.484212] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 934.484399] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 934.484548] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 934.484698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 934.484903] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 934.485120] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 934.485252] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 934.485391] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 934.485559] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 934.491843] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cca73f27-a877-41f7-a326-489b91362787 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.509664] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){
[ 934.509664] env[61905]: value = "task-1362721"
[ 934.509664] env[61905]: _type = "Task"
[ 934.509664] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 934.519847] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.521558] env[61905]: DEBUG nova.policy [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ca67104cdbd4ac9be9a57bb19846925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7af9072624d04f669e8183581e6ca50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}}
[ 934.575889] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362715, 'name': RemoveSnapshot_Task, 'duration_secs': 0.603244} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
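[editor's note] The hardware.py run above enumerates candidate CPU topologies for a 1-vCPU flavor with no explicit preferences (hence the 65536 default caps) and settles on sockets=1, cores=1, threads=1. A rough re-implementation of that enumeration, not Nova's exact _get_possible_cpu_topologies:

    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every (sockets, cores, threads) whose product equals the vCPU
        # count and respects the per-dimension limits is a candidate.
        return [
            VirtCPUTopology(s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus
        ]

    # For vcpus=1 there is exactly one candidate, matching the
    # "Got 1 possible topologies" line above.
    print(possible_topologies(1))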
[ 934.576233] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleted Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}}
[ 934.576514] env[61905]: DEBUG nova.compute.manager [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 934.577343] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5532525-6746-4215-a400-81f2901b9544 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.701860] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362720, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.799145] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52980075-c2fc-f5b7-f8e6-073873920850, 'name': SearchDatastore_Task, 'duration_secs': 0.014682} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 934.799475] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 934.799795] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 934.800055] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 934.800210] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 934.800392] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 934.800717] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-182d2164-98ae-44c8-b74a-41bfcadc3257 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.809301] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 934.809494] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}}
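[editor's note] The Acquiring/Acquired/Releasing triples around the "[datastore1] devstack-image-cache_base/..." paths come from oslo.concurrency's named locks, which serialize access to the shared image cache so concurrent builds do not fetch or mutate the same cached VMDK at once. A minimal sketch of the same pattern (the lock name below is copied from the log and is purely illustrative):

    from oslo_concurrency import lockutils

    CACHE_LOCK = "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053"

    def process_cached_image():
        # lockutils.lock() is a context manager; entering and leaving it
        # emits the "Acquiring"/"Acquired"/"Releasing" DEBUG lines above.
        with lockutils.lock(CACHE_LOCK):
            pass  # fetch or reuse the cached VMDK while holding the lock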
[ 934.810257] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48ee3d6e-3767-4086-90e4-a1b94f4a00f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.815677] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){
[ 934.815677] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52135091-d2c5-7e21-8123-efe378be84b1"
[ 934.815677] env[61905]: _type = "Task"
[ 934.815677] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 934.823533] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52135091-d2c5-7e21-8123-efe378be84b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.841243] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.704s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 934.843940] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 934.844833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 934.844996] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 934.845307] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 934.845495] env[61905]: DEBUG nova.network.neutron [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}}
[ 934.847330] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42267947-d7a0-40ee-9952-7a53dfc7a3de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.866023] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 934.866179] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 934.866347] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 934.866534] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 934.866684] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 934.866830] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 934.867046] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 934.867212] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 934.867379] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 934.867571] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 934.867769] env[61905]: DEBUG nova.virt.hardware [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 934.874176] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfiguring VM to attach interface {{(pid=61905) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}}
[ 934.875199] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8693bcbc-c226-400a-8861-2110c85aa1c2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 934.888043] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Successfully created port: 262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 934.895924] env[61905]: DEBUG oslo_vmware.api [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){
[ 934.895924] env[61905]: value = "task-1362722"
[ 934.895924] env[61905]: _type = "Task"
[ 934.895924] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 934.904346] env[61905]: DEBUG oslo_vmware.api [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362722, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 934.979847] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}}
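[editor's note] The mkdir / _create_folder_if_missing pair a little earlier is an idempotent ensure-directory step: FileManager.MakeDirectory is invoked and a "file already exists" fault is treated as success, so racing builds can both run it safely. A sketch under that assumption, with make_directory() as a hypothetical stand-in for the SOAP call and FileAlreadyExists standing in for the corresponding oslo.vmware fault class:

    class FileAlreadyExists(Exception):
        """Stand-in for the fault raised when the datastore folder exists."""

    def create_folder_if_missing(make_directory, datastore_path):
        # make_directory() is assumed to wrap FileManager.MakeDirectory;
        # a lost race simply means the folder is already there.
        try:
            make_directory(datastore_path)
            print(f"Created directory with path {datastore_path}")
        except FileAlreadyExists:
            # Another request created it first; the postcondition
            # (folder exists) holds either way.
            pass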
[ 935.021265] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362721, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.089792] env[61905]: INFO nova.compute.manager [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Shelve offloading
[ 935.091720] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 935.092064] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5b6a760-2724-468c-b131-d2c27b3401de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.102352] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){
[ 935.102352] env[61905]: value = "task-1362723"
[ 935.102352] env[61905]: _type = "Task"
[ 935.102352] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 935.111128] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}}
[ 935.111338] env[61905]: DEBUG nova.compute.manager [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}}
[ 935.112208] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe8aeb8-2d8c-4102-849a-916645ebdfcd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.121801] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 935.121801] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 935.121801] env[61905]: DEBUG nova.network.neutron [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 935.202398] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362720, 'name': ReconfigVM_Task, 'duration_secs': 0.560789} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.202699] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Reconfigured VM instance instance-00000059 to attach disk [datastore2] e50cac29-797e-44a2-aafc-868e45ffd9cc/e50cac29-797e-44a2-aafc-868e45ffd9cc.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 935.203409] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3bb95c8-2f1d-4c38-8d7c-edf0b9cb15ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.210310] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){
[ 935.210310] env[61905]: value = "task-1362724"
[ 935.210310] env[61905]: _type = "Task"
[ 935.210310] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 935.219039] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362724, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
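[editor's note] Shelve offload powers the instance off before snapshotting it, and the "VM already powered off" line shows the call tolerating a no-op. A sketch of that tolerance; the fault class, helper names, and state string below are illustrative stand-ins, not the exact vm_util implementation:

    class InvalidPowerState(Exception):
        """Stand-in for the fault vCenter raises when the VM is already off."""

    def power_off_instance(get_power_state, power_off_task):
        # Attempt the PowerOffVM task; if it fails because the VM is
        # already off, downgrade to the DEBUG-level no-op seen above.
        try:
            power_off_task()
        except InvalidPowerState:
            if get_power_state() == "poweredOff":
                print("VM already powered off")
            else:
                raise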
[ 935.220381] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeeeee6-e1d9-4c47-9a4c-41e3096b3d80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.227596] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9eccde-c217-4c8e-92ff-5ccae3b1107d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.259885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d48678e-5e06-4568-be43-c817a9b69a2d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.269252] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc7d1e3-9681-494e-ab84-c6c6929275e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.284994] env[61905]: DEBUG nova.compute.provider_tree [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 935.327748] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52135091-d2c5-7e21-8123-efe378be84b1, 'name': SearchDatastore_Task, 'duration_secs': 0.015048} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.329598] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d13c5bde-1e58-4f21-999d-ed4d42178b8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.335768] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){
[ 935.335768] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52401c9a-28a3-95af-278e-93691f0f32e9"
[ 935.335768] env[61905]: _type = "Task"
[ 935.335768] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 935.345607] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52401c9a-28a3-95af-278e-93691f0f32e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.410740] env[61905]: DEBUG oslo_vmware.api [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.521543] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362721, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.599463] env[61905]: DEBUG nova.network.neutron [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}}
[ 935.600113] env[61905]: DEBUG nova.network.neutron [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "address": "fa:16:3e:be:37:76", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a2f5768-03", "ovs_interfaceid": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 935.722429] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362724, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.788582] env[61905]: DEBUG nova.scheduler.client.report [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 935.849604] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52401c9a-28a3-95af-278e-93691f0f32e9, 'name': SearchDatastore_Task, 'duration_secs': 0.018005} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.850009] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 935.853548] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}}
[ 935.856817] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbcf50a4-21d6-4bcb-a23b-b02fcc07e0d7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 935.870369] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){
[ 935.870369] env[61905]: value = "task-1362725"
[ 935.870369] env[61905]: _type = "Task"
[ 935.870369] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
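[editor's note] The inventory record above fixes each resource class's usable capacity; Placement derives it as (total - reserved) * allocation_ratio, so this node advertises a 4x CPU overcommit. Plugging in the logged numbers as a quick check:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's capacity formula for a resource class.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0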
[ 935.880650] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 935.906356] env[61905]: DEBUG oslo_vmware.api [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362722, 'name': ReconfigVM_Task, 'duration_secs': 0.687921} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 935.906939] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 935.907181] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfigured VM to attach interface {{(pid=61905) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}}
[ 935.991299] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}}
[ 936.021055] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4b610b6fff0d4a126c4913fc45ddf35c',container_format='bare',created_at=2024-10-25T05:20:14Z,direct_url=,disk_format='vmdk',id=4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d,min_disk=1,min_ram=0,name='tempest-test-snap-234595262',owner='7af9072624d04f669e8183581e6ca50a',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-25T05:20:31Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 936.021386] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 936.021491] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 936.021676] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 936.021902] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 936.021989] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 936.022261] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 936.022425] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 936.022593] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 936.022756] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 936.022927] env[61905]: DEBUG nova.virt.hardware [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 936.023751] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed980b8-7e96-44df-9a4c-2b2791e4d514 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 936.029425] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362721, 'name': ReconfigVM_Task, 'duration_secs': 1.205951} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 936.030164] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 33 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 936.036677] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ea1ed-bfd1-45ea-8bde-d9f38d704e3a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 936.103529] env[61905]: DEBUG oslo_concurrency.lockutils [req-8124ba85-f6d8-4b9f-b6e3-51f87c9d19a0 req-ea2fc3bd-0af0-4e4b-be84-8f4b82a68bb5 service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
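[editor's note] The CopyVirtualDisk_Task polled above is the tail of the _fetch_image_if_missing flow seen earlier: search the datastore for the cached VMDK, then clone it into the instance directory instead of re-downloading the image from Glance. A condensed sketch of that control flow, with search_datastore() and copy_virtual_disk() as hypothetical wrappers for the SearchDatastore_Task and CopyVirtualDisk_Task invocations:

    def fetch_image_if_missing(search_datastore, copy_virtual_disk,
                               image_id, instance_uuid):
        # Paths follow the layout visible in the log: a shared cache folder
        # plus a per-instance directory on the same datastore.
        cached = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        target = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
        if not search_datastore(cached):
            # The cache-miss branch (download into the cache) is elided here.
            raise NotImplementedError("populate the image cache first")
        copy_virtual_disk(cached, target)  # polled as CopyVirtualDisk_Task
        return target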
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.224286] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362724, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.294517] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.295098] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 936.380713] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362725, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.412176] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8b22506f-2b49-40fc-a8db-51a0e09ee316 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.441s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 936.539698] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
936.540072] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 936.540072] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 936.540072] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 936.540237] env[61905]: DEBUG nova.virt.hardware [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 936.545933] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfiguring VM instance instance-00000053 to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 936.545933] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6910f44-cf99-412f-ada2-7ac834889d08 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.568464] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 936.568464] env[61905]: value = "task-1362726" [ 936.568464] env[61905]: _type = "Task" [ 936.568464] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.578843] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.609230] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.727423] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362724, 'name': Rename_Task} progress is 99%. 
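[editor's note] The network_info blobs cached above are plain JSON-style lists, one entry per VIF; fixed and floating addresses hang off network.subnets[].ips[]. A small walker that pulls addresses out of exactly the structure shown in these cache entries:

    def addresses(network_info):
        # network_info: the list structure from the cache updates above;
        # each fixed IP may nest a floating_ips list of its own.
        out = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    out.append((vif["id"], ip["address"], "fixed"))
                    for fip in ip.get("floating_ips", []):
                        out.append((vif["id"], fip["address"], "floating"))
        return out

    # For the 1502df44 entry above this yields the fixed 192.168.128.13
    # and its floating 10.180.180.136.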
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.754441] env[61905]: DEBUG nova.compute.manager [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Received event network-vif-plugged-262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.754852] env[61905]: DEBUG oslo_concurrency.lockutils [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] Acquiring lock "26375621-b272-4243-95bd-5cf5b946cec4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.754923] env[61905]: DEBUG oslo_concurrency.lockutils [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] Lock "26375621-b272-4243-95bd-5cf5b946cec4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.755085] env[61905]: DEBUG oslo_concurrency.lockutils [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] Lock "26375621-b272-4243-95bd-5cf5b946cec4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.755281] env[61905]: DEBUG nova.compute.manager [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] No waiting events found dispatching network-vif-plugged-262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 936.755437] env[61905]: WARNING nova.compute.manager [req-7067671a-364f-4152-b8af-7701b3de2696 req-3fe5a6a3-1539-4cae-b83b-1a8dcbdd03f8 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Received unexpected event network-vif-plugged-262f0a20-88eb-4d13-a3d7-3033ab16713f for instance with vm_state building and task_state spawning. [ 936.756438] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Successfully updated port: 262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.802027] env[61905]: DEBUG nova.compute.utils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 936.805119] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 936.805373] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.876696] env[61905]: DEBUG nova.policy [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 936.886587] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.082499] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.226036] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362724, 'name': Rename_Task, 'duration_secs': 1.722697} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.226036] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.226036] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a52b1884-beba-4368-9292-bd5b29c6deae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.240825] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 937.240825] env[61905]: value = "task-1362727" [ 937.240825] env[61905]: _type = "Task" [ 937.240825] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.253061] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362727, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.259514] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.259514] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.259514] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.309634] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 937.388476] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362725, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.402215] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.403803] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf79915f-5db1-4d2b-a3bd-7f487c780df4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.415400] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.415778] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8bd7944c-2351-4cb7-9743-42cf7d5cb02e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.520674] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.520967] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.521233] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleting the datastore file [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.521577] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-528506dd-ffbd-4399-abd7-89ba9b05ac28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.530829] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 937.530829] env[61905]: value = "task-1362729" [ 937.530829] env[61905]: _type = "Task" [ 937.530829] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.541412] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362729, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.582802] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362726, 'name': ReconfigVM_Task, 'duration_secs': 0.59009} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.583194] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfigured VM instance instance-00000053 to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 937.584335] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad7efc7-d12e-4ab5-8b34-5d8d5c6d5909 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.623067] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.623067] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78d1bbc1-9a09-4c54-bdbb-d644b7fec39f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.640517] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Successfully created port: 3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.650844] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 937.650844] env[61905]: value = "task-1362730" [ 937.650844] env[61905]: _type = "Task" [ 937.650844] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.667073] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362730, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.750040] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362727, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.796691] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.797552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.830162] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.886265] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362725, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.613744} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.886559] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 937.886896] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.887263] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a35e99f-2690-4c25-b3ec-0556bd124133 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.896714] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 937.896714] env[61905]: value = "task-1362731" [ 937.896714] env[61905]: _type = "Task" [ 937.896714] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.906962] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.041874] env[61905]: DEBUG oslo_vmware.api [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.454513} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.042160] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.042349] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.042546] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.075141] env[61905]: INFO nova.scheduler.client.report [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted allocations for instance 1502df44-9166-4ce8-9117-a57e7be2d299 [ 938.108792] env[61905]: DEBUG nova.network.neutron [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Updating instance_info_cache with network_info: [{"id": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "address": "fa:16:3e:89:da:45", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262f0a20-88", "ovs_interfaceid": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.162394] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.250094] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362727, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.301535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.301535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.302469] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997a1990-06b8-4ba9-8706-adfb662a002d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.323759] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 938.327118] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42646eeb-e90f-40dd-84fa-5a8a2277a0f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.370290] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfiguring VM to detach interface {{(pid=61905) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 938.372862] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.373129] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.373279] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.373464] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.373611] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.373759] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.373967] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.374147] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.374309] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.374471] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.374644] env[61905]: DEBUG nova.virt.hardware [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.374966] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b69dd0f9-9ef6-4ac2-bf25-983cc0b7a160 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.388977] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43035d1a-bb7e-40c1-b047-34c4304e32f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.402556] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3893671c-7f81-41be-83c2-642013e4ea4e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.408304] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 938.408304] env[61905]: value = "task-1362732" [ 938.408304] env[61905]: _type = "Task" [ 938.408304] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.424706] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131067} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.425454] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.426303] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e80d306-a3be-4467-84b2-67897d9230cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.431980] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.462049] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.462573] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fd691f5-ead0-4f1e-b878-97cc0e8cffef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.496280] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 938.496280] env[61905]: value = "task-1362733" [ 938.496280] env[61905]: _type = "Task" [ 938.496280] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.507387] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362733, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.579894] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.580210] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.580431] env[61905]: DEBUG nova.objects.instance [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lazy-loading 'resources' on Instance uuid 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.612401] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.612683] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Instance network_info: |[{"id": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "address": "fa:16:3e:89:da:45", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262f0a20-88", "ovs_interfaceid": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 938.612893] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:da:45', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '262f0a20-88eb-4d13-a3d7-3033ab16713f', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 938.620560] env[61905]: DEBUG oslo.service.loopingcall [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.620849] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.621479] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1577782-a08c-4429-bea0-13e29fe09619 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.644873] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.644873] env[61905]: value = "task-1362734" [ 938.644873] env[61905]: _type = "Task" [ 938.644873] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.661142] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362734, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.664465] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362730, 'name': ReconfigVM_Task, 'duration_secs': 0.699673} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.664779] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Reconfigured VM instance instance-00000053 to attach disk [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f/b9400202-eb37-4c75-bbf3-807edb7bc16f.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.665147] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 50 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 938.753819] env[61905]: DEBUG oslo_vmware.api [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362727, 'name': PowerOnVM_Task, 'duration_secs': 1.074358} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.754292] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.754660] env[61905]: INFO nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Took 11.57 seconds to spawn the instance on the hypervisor. [ 938.754959] env[61905]: DEBUG nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.756209] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87488db-8caa-4890-8aa3-5a0657b730a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.837741] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Received event network-changed-262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 938.838240] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Refreshing instance network info cache due to event network-changed-262f0a20-88eb-4d13-a3d7-3033ab16713f. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 938.838521] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Acquiring lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.838678] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Acquired lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.838864] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Refreshing network info cache for port 262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.920670] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.007883] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362733, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.083387] env[61905]: DEBUG nova.objects.instance [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lazy-loading 'numa_topology' on Instance uuid 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.155877] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362734, 'name': CreateVM_Task, 'duration_secs': 0.421007} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.156251] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 939.156790] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.156966] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.157359] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 939.157648] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8c4e8a5-484b-4ecf-abd6-a59a1555ba9d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.163805] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 939.163805] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52262a9c-4fca-2180-f88d-a40bde56340d" [ 939.163805] env[61905]: _type = "Task" [ 939.163805] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.173349] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6ed50f-ac5e-425f-8437-bc4263f31fe5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.198082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.198466] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Processing image 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.198790] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.198998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.199238] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.199713] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-399983ab-c618-4627-8bb6-f9dfc80038a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.202340] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16114332-022f-4177-82a7-681add6e30ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.222104] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 67 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 939.231056] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.231389] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.232489] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08da806c-2183-4e8d-a5ad-5feb2841e5d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.241181] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 939.241181] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522d1c68-6e37-9de2-cb10-30d3934470b5" [ 939.241181] env[61905]: _type = "Task" [ 939.241181] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.255707] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522d1c68-6e37-9de2-cb10-30d3934470b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.277758] env[61905]: INFO nova.compute.manager [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Took 26.10 seconds to build instance. [ 939.421533] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.499503] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 939.500516] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac7cacb-b3b6-48ea-b37b-5d0c1577bd1b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.512022] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362733, 'name': ReconfigVM_Task, 'duration_secs': 0.566307} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.513606] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 939.514437] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk is in state: ready. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 939.514437] env[61905]: ERROR oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk due to incomplete transfer. [ 939.515177] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Successfully updated port: 3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 939.516370] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a889dd9-3914-4b4f-aef1-ca47569eabb4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.517732] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-52329634-256e-4066-a9dc-bfed86819e6a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.528573] env[61905]: DEBUG oslo_vmware.rw_handles [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527553e4-e06c-0b78-7eac-85e427974a44/disk-0.vmdk. 
{{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 939.528790] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Uploaded image 87d546bb-0571-4919-bdca-29528219287a to the Glance image server {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 939.530649] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Destroying the VM {{(pid=61905) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 939.532223] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b2a3c05c-1a76-4b4d-95c9-7d269769a230 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.533879] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 939.533879] env[61905]: value = "task-1362735" [ 939.533879] env[61905]: _type = "Task" [ 939.533879] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.539896] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 939.539896] env[61905]: value = "task-1362736" [ 939.539896] env[61905]: _type = "Task" [ 939.539896] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.549220] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362735, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.554609] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362736, 'name': Destroy_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.586264] env[61905]: DEBUG nova.objects.base [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Object Instance<1502df44-9166-4ce8-9117-a57e7be2d299> lazy-loaded attributes: resources,numa_topology {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 939.683344] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Updated VIF entry in instance network info cache for port 262f0a20-88eb-4d13-a3d7-3033ab16713f. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.683797] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Updating instance_info_cache with network_info: [{"id": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "address": "fa:16:3e:89:da:45", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap262f0a20-88", "ovs_interfaceid": "262f0a20-88eb-4d13-a3d7-3033ab16713f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.736497] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.736976] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.736976] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.737180] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.737376] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd 
tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.743557] env[61905]: INFO nova.compute.manager [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Terminating instance [ 939.748340] env[61905]: DEBUG nova.compute.manager [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 939.748531] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.749302] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae61a1ce-c76b-4b2a-92ac-d9e7a9da7388 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.761498] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Preparing fetch location {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 939.762759] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Fetch image to [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253/OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253.vmdk {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 939.762759] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Downloading stream optimized image 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d to [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253/OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253.vmdk on the data store datastore1 as vApp {{(pid=61905) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 939.762759] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Downloading image file data 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d to the ESX as VM named 'OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253' {{(pid=61905) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 939.765253] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.765788] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-beee6da1-0f5d-477b-b74a-8064dd1feea8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.777973] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 939.777973] env[61905]: value = "task-1362737" [ 939.777973] env[61905]: _type = "Task" [ 939.777973] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.784341] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0d1f78d-02b1-4f88-abe5-6c9c05364576 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.617s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.792201] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.812812] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.813442] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.813442] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.813442] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.813675] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.816080] env[61905]: INFO nova.compute.manager [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Terminating instance [ 939.818580] env[61905]: DEBUG nova.compute.manager [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 939.819332] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.819969] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf27ea-6990-4fae-af46-d34a8aba7be8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.826846] env[61905]: DEBUG nova.network.neutron [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Port 3802415e-d978-40f5-8265-2e03cbdd0814 binding to destination host cpu-1 is already ACTIVE {{(pid=61905) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 939.837306] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.837306] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2cf555e-c0a5-4709-b97f-2c897b789744 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.844553] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e923d98e-a6fb-497c-ad98-073e05c7ce54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.850970] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 939.850970] env[61905]: value = "task-1362738" [ 
939.850970] env[61905]: _type = "Task" [ 939.850970] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.866700] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 939.866700] env[61905]: value = "resgroup-9" [ 939.866700] env[61905]: _type = "ResourcePool" [ 939.866700] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 939.867709] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd45533-1d98-4b63-a5e3-5e6d12e69af1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.871490] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.871942] env[61905]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2248146b-3c58-4ed6-8c13-d23c540fc9e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.926244] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c916730b-94e5-4217-b36d-9ca20963a38b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.929288] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease: (returnval){ [ 939.929288] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 939.929288] env[61905]: _type = "HttpNfcLease" [ 939.929288] env[61905]: } obtained for vApp import into resource pool (val){ [ 939.929288] env[61905]: value = "resgroup-9" [ 939.929288] env[61905]: _type = "ResourcePool" [ 939.929288] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 939.929820] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the lease: (returnval){ [ 939.929820] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 939.929820] env[61905]: _type = "HttpNfcLease" [ 939.929820] env[61905]: } to be ready. {{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 939.943810] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.946468] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe9749b-ef77-4540-83ab-f014dd62469b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.955216] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 939.955216] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 939.955216] env[61905]: _type = "HttpNfcLease" [ 939.955216] env[61905]: } is initializing. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 939.970980] env[61905]: DEBUG nova.compute.provider_tree [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.021995] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.022181] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.022340] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.050523] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362736, 'name': Destroy_Task, 'duration_secs': 0.464075} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.053617] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Destroyed the VM [ 940.053921] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Deleting Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 940.054501] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362735, 'name': Rename_Task, 'duration_secs': 0.175801} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.054741] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-239c342e-db98-4c84-aa6b-f822fb6f0ef2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.056810] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 940.057037] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a15cb68-fea6-4dc7-a5ee-ed39223f9ebc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.065569] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 940.065569] env[61905]: value = "task-1362741" [ 940.065569] env[61905]: _type = "Task" [ 940.065569] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.071329] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 940.071329] env[61905]: value = "task-1362740" [ 940.071329] env[61905]: _type = "Task" [ 940.071329] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.081056] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362741, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.084183] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.187474] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Releasing lock "refresh_cache-26375621-b272-4243-95bd-5cf5b946cec4" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.187931] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-vif-unplugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.188443] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.188760] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.189017] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.189318] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] No waiting events found dispatching network-vif-unplugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.189948] env[61905]: WARNING nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received unexpected event network-vif-unplugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 for instance with vm_state shelved_offloaded and task_state None. 
[ 940.189948] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.190169] env[61905]: DEBUG nova.compute.manager [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing instance network info cache due to event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.190410] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.190654] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.190823] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.289884] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362737, 'name': PowerOffVM_Task, 'duration_secs': 0.300978} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.290235] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.290453] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.290774] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c12ec0e2-7d8f-4755-93dc-5a13cfc1d7bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.365990] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362738, 'name': PowerOffVM_Task, 'duration_secs': 0.279225} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.366675] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.367435] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.367435] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9107f892-3ed4-4dbd-9ff1-3f12c6d55251 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.375678] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.375976] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.376221] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleting the datastore file [datastore2] 4eba4203-0e35-4c56-b24f-3ac47a7a8b83 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.376504] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b0fba75-5530-4414-be43-790d43101954 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.386938] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 940.386938] env[61905]: value = "task-1362744" [ 940.386938] env[61905]: _type = "Task" [ 940.386938] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.397286] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.431542] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.440801] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 940.440801] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 940.440801] env[61905]: _type = "HttpNfcLease" [ 940.440801] env[61905]: } is initializing. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 940.474551] env[61905]: DEBUG nova.scheduler.client.report [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.571081] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.571324] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.571506] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleting the datastore file [datastore2] e50cac29-797e-44a2-aafc-868e45ffd9cc {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.572585] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.574611] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2af2312-5d26-4e56-950c-a01be08b0dcf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.587575] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362741, 'name': RemoveSnapshot_Task, 'duration_secs': 0.500614} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.592338] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Deleted Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 940.592635] env[61905]: DEBUG nova.compute.manager [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 940.592975] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for the task: (returnval){ [ 940.592975] env[61905]: value = "task-1362745" [ 940.592975] env[61905]: _type = "Task" [ 940.592975] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.593238] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362740, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.593924] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a767bc62-70e4-4a6a-b083-9a28cb736a1c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.615963] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362745, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.855518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.855840] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.856088] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.867234] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.875244] env[61905]: DEBUG nova.compute.manager [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Received event network-vif-plugged-3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.875481] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Acquiring lock "30862de5-1cfa-494a-a81d-1215a3580339-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.875727] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Lock "30862de5-1cfa-494a-a81d-1215a3580339-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.875869] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Lock "30862de5-1cfa-494a-a81d-1215a3580339-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.876016] 
env[61905]: DEBUG nova.compute.manager [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] No waiting events found dispatching network-vif-plugged-3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 940.876186] env[61905]: WARNING nova.compute.manager [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Received unexpected event network-vif-plugged-3892e570-77e4-46de-8f2b-a098cd36d007 for instance with vm_state building and task_state spawning. [ 940.876349] env[61905]: DEBUG nova.compute.manager [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Received event network-changed-3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 940.876499] env[61905]: DEBUG nova.compute.manager [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Refreshing instance network info cache due to event network-changed-3892e570-77e4-46de-8f2b-a098cd36d007. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 940.876664] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Acquiring lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.886290] env[61905]: DEBUG nova.network.neutron [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Updating instance_info_cache with network_info: [{"id": "3892e570-77e4-46de-8f2b-a098cd36d007", "address": "fa:16:3e:13:c3:3f", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3892e570-77", "ovs_interfaceid": "3892e570-77e4-46de-8f2b-a098cd36d007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.898234] env[61905]: DEBUG oslo_vmware.api [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362744, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252669} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.899228] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.899228] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.899420] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.899642] env[61905]: INFO nova.compute.manager [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Took 1.15 seconds to destroy the instance on the hypervisor. [ 940.899828] env[61905]: DEBUG oslo.service.loopingcall [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.900271] env[61905]: DEBUG nova.compute.manager [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 940.900367] env[61905]: DEBUG nova.network.neutron [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.932957] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.943616] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 940.943616] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 940.943616] env[61905]: _type = "HttpNfcLease" [ 940.943616] env[61905]: } is ready. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 940.943932] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 940.943932] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a4c3b-c2ca-cddd-0e56-5e4f53d1e3dc" [ 940.943932] env[61905]: _type = "HttpNfcLease" [ 940.943932] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 940.944718] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb111cfc-3215-4d7e-9622-697ccf1708ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.953878] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk from lease info. {{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 940.954119] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk. {{(pid=61905) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 941.016082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.436s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.026559] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c47f0606-fc41-4a66-bfaf-26c5a84b7871 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.088569] env[61905]: DEBUG oslo_vmware.api [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362740, 'name': PowerOnVM_Task, 'duration_secs': 0.547119} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.088836] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 941.089784] env[61905]: INFO nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Took 7.42 seconds to spawn the instance on the hypervisor. [ 941.089784] env[61905]: DEBUG nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 941.090698] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7c4111-dd48-4688-9d47-87d46716d352 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.111636] env[61905]: DEBUG oslo_vmware.api [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Task: {'id': task-1362745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17417} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.111636] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.111636] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.111636] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.111636] env[61905]: INFO nova.compute.manager [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Took 1.29 seconds to destroy the instance on the hypervisor. [ 941.111636] env[61905]: DEBUG oslo.service.loopingcall [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.111636] env[61905]: DEBUG nova.compute.manager [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 941.111636] env[61905]: DEBUG nova.network.neutron [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 941.117494] env[61905]: INFO nova.compute.manager [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Shelve offloading [ 941.119075] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.119309] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d7c0529-c69f-4f0f-ae08-f64e03630a95 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.127716] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 941.127716] env[61905]: value = "task-1362746" [ 941.127716] env[61905]: _type = "Task" [ 941.127716] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.140289] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 941.140469] env[61905]: DEBUG nova.compute.manager [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 941.141354] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f775ddc2-6448-4612-b3fd-8a5600f4a428 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.149220] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.149431] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.149665] env[61905]: DEBUG nova.network.neutron [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.238610] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updated VIF entry in instance network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.239292] env[61905]: DEBUG nova.network.neutron [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap767ea9d8-de", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.388689] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.389044] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Instance network_info: |[{"id": "3892e570-77e4-46de-8f2b-a098cd36d007", "address": "fa:16:3e:13:c3:3f", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3892e570-77", "ovs_interfaceid": "3892e570-77e4-46de-8f2b-a098cd36d007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 941.390989] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Acquired lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" 
{{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.391226] env[61905]: DEBUG nova.network.neutron [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Refreshing network info cache for port 3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.396695] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c3:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3892e570-77e4-46de-8f2b-a098cd36d007', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.400975] env[61905]: DEBUG oslo.service.loopingcall [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.405709] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.406941] env[61905]: DEBUG nova.compute.manager [req-9af4d224-6c30-4dab-b58b-2c9b5e7bdffd req-70387d01-6de4-4af1-9ef4-d67181bdfd99 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Received event network-vif-deleted-71e6216d-55ff-4eaa-a053-1d3da0bbd7ff {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.407153] env[61905]: INFO nova.compute.manager [req-9af4d224-6c30-4dab-b58b-2c9b5e7bdffd req-70387d01-6de4-4af1-9ef4-d67181bdfd99 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Neutron deleted interface 71e6216d-55ff-4eaa-a053-1d3da0bbd7ff; detaching it from the instance and deleting it from the info cache [ 941.407326] env[61905]: DEBUG nova.network.neutron [req-9af4d224-6c30-4dab-b58b-2c9b5e7bdffd req-70387d01-6de4-4af1-9ef4-d67181bdfd99 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.408891] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bddda2e1-2bb4-4acc-9c24-fa1872cafa39 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.436674] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.437069] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d
tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.447883] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.448172] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.448172] env[61905]: value = "task-1362747" [ 941.448172] env[61905]: _type = "Task" [ 941.448172] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.464235] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362747, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.529679] env[61905]: DEBUG oslo_concurrency.lockutils [None req-afa14ab9-1070-47c0-93c9-4eaf2eac9959 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 23.991s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.530718] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.664s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.530927] env[61905]: INFO nova.compute.manager [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Unshelving [ 941.531752] env[61905]: WARNING oslo_messaging._drivers.amqpdriver [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 941.618374] env[61905]: INFO nova.compute.manager [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Took 19.64 seconds to build instance.
[ 941.743958] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bad8813-4f91-488e-b6f2-0b68176b0078 req-1b66b54d-dfe8-417d-a243-9b4d96a6e923 service nova] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.820995] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Completed reading data from the image iterator. {{(pid=61905) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 941.820995] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 941.821973] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c46066-b407-4e29-ab17-3d3359d3a496 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.831289] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk is in state: ready. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 941.831289] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk. 
{{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 941.831289] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-70ebf4a9-1eab-4e45-86e0-67f840f556af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.843969] env[61905]: DEBUG nova.network.neutron [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.866328] env[61905]: DEBUG nova.network.neutron [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.913391] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.914118] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.914118] env[61905]: DEBUG nova.network.neutron [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.915620] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33a67fff-57fa-4165-9474-a53f2817afb1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.926903] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe67eb1b-dda0-4b47-8d24-42c4954f85c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.942815] env[61905]: DEBUG nova.compute.utils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 941.953612] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.965054] env[61905]: DEBUG nova.compute.manager [req-9af4d224-6c30-4dab-b58b-2c9b5e7bdffd req-70387d01-6de4-4af1-9ef4-d67181bdfd99 service nova] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Detach interface failed, port_id=71e6216d-55ff-4eaa-a053-1d3da0bbd7ff, reason: Instance e50cac29-797e-44a2-aafc-868e45ffd9cc could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 941.966571] env[61905]: DEBUG nova.network.neutron [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updating instance_info_cache with network_info: [{"id": "f8651682-560e-4a78-8a0a-bd0024272caa", "address": "fa:16:3e:f8:aa:04", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8651682-56", "ovs_interfaceid": "f8651682-560e-4a78-8a0a-bd0024272caa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.974302] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362747, 'name': CreateVM_Task, 'duration_secs': 0.40246} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.974302] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.974302] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.974302] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.974302] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.974655] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c52da492-b349-4da4-bb12-0abd2645306f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.980521] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 941.980521] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5252025b-3fc9-ef8a-bc4e-4c6241391eaa" [ 941.980521] env[61905]: _type = "Task" [ 941.980521] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.990391] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5252025b-3fc9-ef8a-bc4e-4c6241391eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.019932] env[61905]: DEBUG oslo_vmware.rw_handles [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265699f-a577-70df-195b-a6803203667a/disk-0.vmdk. 
{{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 942.020463] env[61905]: INFO nova.virt.vmwareapi.images [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Downloaded image file data 4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d [ 942.021874] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c7750c-20b4-49c1-a962-517769d13204 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.040919] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9709209c-67e0-47b0-bc1a-b3fd4baf9e27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.070181] env[61905]: INFO nova.virt.vmwareapi.images [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] The imported VM was unregistered [ 942.072760] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Caching image {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 942.072956] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.073268] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f346d53-7291-4c21-853c-dd2dd6cdbf25 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.084975] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created directory with path [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.085191] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253/OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253.vmdk to [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk. 
{{(pid=61905) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 942.085456] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-12dfa9f1-abca-40a0-8e5a-b7f4ec28afc7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.094222] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 942.094222] env[61905]: value = "task-1362749" [ 942.094222] env[61905]: _type = "Task" [ 942.094222] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.102732] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.121040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ed5d7227-9f8c-4f9f-bb66-03292ec50a32 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.155s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.338364] env[61905]: DEBUG nova.network.neutron [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Updated VIF entry in instance network info cache for port 3892e570-77e4-46de-8f2b-a098cd36d007.
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.338763] env[61905]: DEBUG nova.network.neutron [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Updating instance_info_cache with network_info: [{"id": "3892e570-77e4-46de-8f2b-a098cd36d007", "address": "fa:16:3e:13:c3:3f", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3892e570-77", "ovs_interfaceid": "3892e570-77e4-46de-8f2b-a098cd36d007", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.346233] env[61905]: INFO nova.compute.manager [-] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Took 1.45 seconds to deallocate network for instance. [ 942.369047] env[61905]: INFO nova.compute.manager [-] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Took 1.26 seconds to deallocate network for instance. [ 942.448479] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.011s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.453194] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.469650] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.492647] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5252025b-3fc9-ef8a-bc4e-4c6241391eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.013864} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.493115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.493526] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.493827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.494197] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.494197] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.494489] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-800ffd77-b685-4fcd-b6ed-03dfea282d9a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.510569] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.510569] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.510569] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99ebb9e9-b87f-4330-af4b-bd33f2f65498 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.517156] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 942.517156] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d50fa2-2744-935a-6789-5b5621e69e96" [ 942.517156] env[61905]: _type = "Task" [ 942.517156] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.528809] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d50fa2-2744-935a-6789-5b5621e69e96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.571494] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.572044] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.572312] env[61905]: DEBUG nova.objects.instance [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lazy-loading 'pci_requests' on Instance uuid 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.606541] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.694292] env[61905]: DEBUG nova.network.neutron [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [{"id": "3802415e-d978-40f5-8265-2e03cbdd0814", "address": "fa:16:3e:3c:3e:e6", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3802415e-d9", "ovs_interfaceid": "3802415e-d978-40f5-8265-2e03cbdd0814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.773249] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 942.774353] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a433b47d-b59e-4fb9-800d-8431469603bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.785388] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.785767] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e6ed0a2-c08d-4052-aaa3-a347cbaf1604 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.842230] env[61905]: DEBUG oslo_concurrency.lockutils [req-36dbfdce-2cff-4166-893a-9e1cd91c520e req-992ae3e0-a3bd-4259-a42f-5dda2b1da949 service nova] Releasing lock "refresh_cache-30862de5-1cfa-494a-a81d-1215a3580339" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.853691] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" 
{{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.875126] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.875433] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.875595] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleting the datastore file [datastore2] 4b1723a2-94a2-4070-9b47-85c9c8169137 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.876723] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.877157] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac3553ac-5a46-4fb8-b9d5-3869a8f570f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.886353] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 942.886353] env[61905]: value = "task-1362751" [ 942.886353] env[61905]: _type = "Task" [ 942.886353] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.897034] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.951130] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.967306] env[61905]: DEBUG nova.compute.manager [req-cc3f93d8-00e3-443c-84e5-5e573c0591ac req-e1230995-6e24-48fc-86ea-0ee493e7ac30 service nova] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Received event network-vif-deleted-50e51588-2e01-4760-abff-6c8ee440a693 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.029814] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d50fa2-2744-935a-6789-5b5621e69e96, 'name': SearchDatastore_Task, 'duration_secs': 0.07443} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.031366] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d77a0f5d-99c2-435e-9527-cded9880e2ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.039398] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 943.039398] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ca530f-91e5-4494-0f35-2c38fe3a8607" [ 943.039398] env[61905]: _type = "Task" [ 943.039398] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.042950] env[61905]: INFO nova.compute.manager [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Rebuilding instance [ 943.052041] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ca530f-91e5-4494-0f35-2c38fe3a8607, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.076473] env[61905]: DEBUG nova.objects.instance [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lazy-loading 'numa_topology' on Instance uuid 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.087637] env[61905]: DEBUG nova.compute.manager [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 943.088624] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e83605-3e36-4bab-9ebd-acfa4616ed7f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.113510] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.197673] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.399816] env[61905]: DEBUG oslo_vmware.api [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249229} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.399816] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.400064] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.400152] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.423126] env[61905]: DEBUG nova.compute.manager [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received event network-vif-unplugged-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.423509] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.423771] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.423954] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.424144] env[61905]: DEBUG nova.compute.manager [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] No waiting events found dispatching network-vif-unplugged-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 943.424337] env[61905]: WARNING nova.compute.manager [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received unexpected event network-vif-unplugged-f8651682-560e-4a78-8a0a-bd0024272caa for instance with vm_state shelved and task_state shelving_offloading.
[ 943.424481] env[61905]: DEBUG nova.compute.manager [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Received event network-changed-f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.424679] env[61905]: DEBUG nova.compute.manager [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Refreshing instance network info cache due to event network-changed-f8651682-560e-4a78-8a0a-bd0024272caa. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.424842] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Acquiring lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.424962] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Acquired lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.425166] env[61905]: DEBUG nova.network.neutron [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Refreshing network info cache for port f8651682-560e-4a78-8a0a-bd0024272caa {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.427731] env[61905]: INFO nova.scheduler.client.report [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance 4b1723a2-94a2-4070-9b47-85c9c8169137 [ 943.452163] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.510891] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.511237] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.511604] env[61905]: INFO nova.compute.manager [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Attaching volume 7c5adcd9-fa15-4e5e-b5bf-e23627647ad2 to /dev/sdb [ 943.549571] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2095bb03-9562-42b9-abd1-0726f5866c20 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.556745] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ca530f-91e5-4494-0f35-2c38fe3a8607, 'name': SearchDatastore_Task, 'duration_secs': 0.084211} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.557651] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.558048] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 30862de5-1cfa-494a-a81d-1215a3580339/30862de5-1cfa-494a-a81d-1215a3580339.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.558369] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e0487e2-2907-42ea-a5ac-d5dd990d286c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.563781] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2573ae2f-801c-4314-9faa-8899ad278450 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.568886] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 943.568886] env[61905]: value = "task-1362752" [ 943.568886] env[61905]: _type = "Task" [ 943.568886] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.581088] env[61905]: INFO nova.compute.claims [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.583997] env[61905]: DEBUG nova.virt.block_device [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating existing volume attachment record: bd0e6e29-2625-430d-bd5a-f5d981491065 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 943.590284] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.607186] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.611075] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5df84ac1-9d27-4b77-afba-bac044d0b963 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.612854] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.622142] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 943.622142] env[61905]: value = "task-1362753" [ 943.622142] env[61905]: _type = "Task" [ 943.622142] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.636177] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.720730] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce55afb9-7640-4cb9-be10-185e8ac83561 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.744649] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a68a68b-c46e-4862-a16f-0e22031492aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.756728] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 83 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 943.933133] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.952429] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.081175] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.112721] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.135072] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362753, 'name': PowerOffVM_Task, 'duration_secs': 0.307908} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.135396] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.135617] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.136489] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff943f3-df28-422c-8486-3f7be5596a5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.148491] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.148804] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afa69309-feca-4b6e-ab44-502c3d63ad8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.183538] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.184143] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.184143] env[61905]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Deleting the datastore file [datastore1] 7bb17b60-268a-4670-beb8-df5232a698ae {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.184424] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eef6bc34-fff1-4f6b-a92a-46b5c9dc3d8c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.195872] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 944.195872] env[61905]: value = "task-1362758" [ 944.195872] env[61905]: _type = "Task" [ 944.195872] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.207502] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.262811] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.263397] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84ecc84e-e335-4d60-85e4-f1ac32ae0152 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.268113] env[61905]: DEBUG nova.network.neutron [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updated VIF entry in instance network info cache for port f8651682-560e-4a78-8a0a-bd0024272caa. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.268471] env[61905]: DEBUG nova.network.neutron [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updating instance_info_cache with network_info: [{"id": "f8651682-560e-4a78-8a0a-bd0024272caa", "address": "fa:16:3e:f8:aa:04", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": null, "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf8651682-56", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.276175] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 944.276175] env[61905]: value = "task-1362759" [ 944.276175] env[61905]: _type = "Task" [ 944.276175] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.298648] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.453422] env[61905]: DEBUG oslo_vmware.api [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362732, 'name': ReconfigVM_Task, 'duration_secs': 5.861585} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.453775] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.454028] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Reconfigured VM to detach interface {{(pid=61905) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 944.585589] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.614404] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.705673] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.772339] env[61905]: DEBUG oslo_concurrency.lockutils [req-cf1af39b-ff1b-4129-bd75-d325ecc48a19 req-5c4d4794-ef36-49f6-bf43-e972822c51da service nova] Releasing lock "refresh_cache-4b1723a2-94a2-4070-9b47-85c9c8169137" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.786839] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362759, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.802018] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a369ee-d940-4c65-8c9a-1fc30bb027d9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.810945] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcec11d-c8ac-40c1-96ae-9fabc25bb2ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.818951] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.848284] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e6e76c-d4b4-4df1-8a72-9b49b271f111 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.857821] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38c41b9-f0cb-407b-8125-1c4ffecf9dd9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.873021] env[61905]: DEBUG nova.compute.provider_tree [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.082677] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362752, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.112551] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362749, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.876164} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.112859] env[61905]: INFO nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253/OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253.vmdk to [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk. 
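The CopyVirtualDisk_Task and MoveVirtualDisk_Task records above all follow the oslo.vmware task-polling pattern: submit a long-running vSphere task, then poll it on an interval, logging progress until it reports success or failure. Below is a minimal sketch of that loop; FakeTask, its poll() method, and poll_interval are illustrative stand-ins for a real vSphere task handle, not oslo.vmware's API.

    import time

    class FakeTask:
        """Stand-in for a vSphere task handle; advances on each poll."""
        def __init__(self, name, steps=4):
            self.name = name
            self._step = 0
            self._steps = steps

        def poll(self):
            """Return (state, progress); mimics reading the task's info."""
            self._step = min(self._step + 1, self._steps)
            if self._step < self._steps:
                return "running", int(100 * self._step / self._steps)
            return "success", 100

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task leaves the running state, logging progress
        the way the _poll_task records above do."""
        while True:
            state, progress = task.poll()
            if state == "running":
                print(f"Task: {task.name} progress is {progress}%.")
                time.sleep(poll_interval)
            elif state == "success":
                print(f"Task: {task.name} completed successfully.")
                return
            raise RuntimeError(f"Task {task.name} failed in state {state}")

    wait_for_task(FakeTask("CopyVirtualDisk_Task"))

The "progress is N%" lines in the log correspond to the running branch here; oslo.vmware layers retry handling and vSphere fault translation on top of the same basic loop shape.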
[ 945.113080] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Cleaning up location [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 945.113248] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f78c9502-0223-40f0-8047-5a32cbd3a253 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.113521] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d40d6e7-780e-4634-a32b-5dad6a46e7f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.125950] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 945.125950] env[61905]: value = "task-1362760" [ 945.125950] env[61905]: _type = "Task" [ 945.125950] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.134573] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.210033] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.790993} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.210033] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.210252] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.210294] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.288103] env[61905]: DEBUG oslo_vmware.api [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362759, 'name': PowerOnVM_Task, 'duration_secs': 0.838721} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.288431] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.288617] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5efe39b1-bbff-466a-992b-79abec9d4bb1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance 'b9400202-eb37-4c75-bbf3-807edb7bc16f' progress to 100 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 945.376194] env[61905]: DEBUG nova.scheduler.client.report [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.587359] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362752, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.790109} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.587729] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 30862de5-1cfa-494a-a81d-1215a3580339/30862de5-1cfa-494a-a81d-1215a3580339.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 945.587883] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.588190] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60b62fa2-17f3-44e8-b7c3-717400e5acd0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.597127] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 945.597127] env[61905]: value = "task-1362761" [ 945.597127] env[61905]: _type = "Task" [ 945.597127] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.607262] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.636911] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188422} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.637347] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.637461] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.637732] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk to [datastore1] 26375621-b272-4243-95bd-5cf5b946cec4/26375621-b272-4243-95bd-5cf5b946cec4.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.638062] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44dd70cb-bac0-4663-aa8a-9bd89ce44bb9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.646449] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 945.646449] env[61905]: value = "task-1362762" [ 945.646449] env[61905]: _type = "Task" [ 945.646449] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.661029] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.822990] env[61905]: DEBUG nova.compute.manager [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.823179] env[61905]: DEBUG nova.compute.manager [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing instance network info cache due to event network-changed-7c93f7f3-4702-4071-8e42-c0627b146af5. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.823263] env[61905]: DEBUG oslo_concurrency.lockutils [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.823426] env[61905]: DEBUG oslo_concurrency.lockutils [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.823563] env[61905]: DEBUG nova.network.neutron [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Refreshing network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 945.850336] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.882355] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.310s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.884536] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.031s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.884766] env[61905]: DEBUG nova.objects.instance [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lazy-loading 'resources' on Instance uuid 4eba4203-0e35-4c56-b24f-3ac47a7a8b83 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.934830] env[61905]: INFO nova.network.neutron [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 946.106650] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076752} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.107041] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 946.107763] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8052d5-40d1-4bfe-9b55-1c4d128c0a22 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.132071] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 30862de5-1cfa-494a-a81d-1215a3580339/30862de5-1cfa-494a-a81d-1215a3580339.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 946.132460] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3abb80b0-b768-4690-b4c4-c101aa084ff3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.157319] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.158811] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 946.158811] env[61905]: value = "task-1362764" [ 946.158811] env[61905]: _type = "Task" [ 946.158811] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.167405] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.261528] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 946.261528] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 946.261528] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.261528] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 946.262230] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.262640] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 946.263764] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 946.263764] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 946.263764] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 
tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 946.264144] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 946.267123] env[61905]: DEBUG nova.virt.hardware [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 946.267123] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedaa019-1351-4f3e-9ba0-4f509d20d19d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.279019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e9a1be-bf62-493e-b056-5ceead9eb64c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.291402] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance VIF info [] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.296857] env[61905]: DEBUG oslo.service.loopingcall [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.300734] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.301766] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0d14684-1e60-460a-b8e4-42489a96bf00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.318988] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.318988] env[61905]: value = "task-1362765" [ 946.318988] env[61905]: _type = "Task" [ 946.318988] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.329564] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362765, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.509628] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.509971] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.510360] env[61905]: DEBUG nova.objects.instance [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'flavor' on Instance uuid 63eb2219-fea2-4af0-90d2-e8d9ac53a138 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.547328] env[61905]: DEBUG nova.network.neutron [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updated VIF entry in instance network info cache for port 7c93f7f3-4702-4071-8e42-c0627b146af5. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 946.547328] env[61905]: DEBUG nova.network.neutron [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "address": "fa:16:3e:be:37:76", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a2f5768-03", "ovs_interfaceid": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.583655] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69866f98-aed3-4d6f-8659-851a120fdbdb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.593993] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e4159e-2eab-4133-ae8c-e88c010a9e57 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.632411] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7976da6e-3302-4752-89dc-f9566ef9e925 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.644316] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a7a45a-46db-4923-bc7a-f579e12587fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.661224] env[61905]: DEBUG nova.compute.provider_tree [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.669451] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.677230] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.833379] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362765, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.049040] env[61905]: DEBUG oslo_concurrency.lockutils [req-31402897-2df2-4aa1-8d97-99418f6aac43 req-143ef892-35a3-46ff-b822-98fc31e8cc2d service nova] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.049572] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.049923] env[61905]: DEBUG nova.network.neutron [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.144253] env[61905]: DEBUG nova.objects.instance [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'pci_requests' on Instance uuid 63eb2219-fea2-4af0-90d2-e8d9ac53a138 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.160615] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.176023] env[61905]: DEBUG nova.scheduler.client.report [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 947.176331] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.331497] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362765, 'name': CreateVM_Task, 'duration_secs': 0.58883} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.331704] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.332247] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.332418] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.332750] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.333047] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-183ba4c3-78cc-4389-a46b-fbe9f813f89e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.339972] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 947.339972] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52177e4e-85d9-9b50-cf79-135233af11a2" [ 947.339972] env[61905]: _type = "Task" [ 947.339972] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.350183] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52177e4e-85d9-9b50-cf79-135233af11a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.464114] env[61905]: DEBUG nova.compute.manager [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.464536] env[61905]: DEBUG oslo_concurrency.lockutils [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.464733] env[61905]: DEBUG oslo_concurrency.lockutils [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.464733] env[61905]: DEBUG oslo_concurrency.lockutils [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.464926] env[61905]: DEBUG nova.compute.manager [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] No waiting events found dispatching network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 947.465115] env[61905]: WARNING nova.compute.manager [req-c327eac8-8d2f-433f-919e-1bb69d356f66 req-4af992d1-2513-4e1e-9444-cebb6984a59f service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received unexpected event network-vif-plugged-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 for instance with vm_state shelved_offloaded and task_state spawning. 
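The network-vif-plugged handling above shows Nova's external-event pattern: the compute manager registers events it expects for an instance, an incoming Neutron notification pops the matching entry under a per-instance "<uuid>-events" lock, and an event nobody registered for draws the "Received unexpected event" warning (here because the instance is shelved_offloaded). A rough sketch of that registry under the same locking discipline, using plain threading primitives in place of Nova's InstanceEvents object (ExternalEvents, register_event, and dispatch are illustrative names):

    import threading

    class ExternalEvents:
        """Per-instance registry of expected external events."""
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> Event

        def register_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Called when a notification arrives from the network service."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print(f"WARNING: received unexpected event {event_name} "
                      f"for instance {instance_uuid}")
            else:
                waiter.set()   # wakes whoever is blocked on waiter.wait()

    events = ExternalEvents()
    w = events.register_event("1502df44", "network-vif-plugged")
    events.dispatch("1502df44", "network-vif-plugged")   # pops and signals
    events.dispatch("1502df44", "network-vif-plugged")   # nothing registered -> warning
    assert w.is_set()

In the log, the "No waiting events found" path is the waiter-is-None branch: the instance was shelved_offloaded while spawning, so nothing had registered for the plug event.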
[ 947.551881] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.552305] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.552477] env[61905]: DEBUG nova.network.neutron [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.650610] env[61905]: DEBUG nova.objects.base [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Object Instance<63eb2219-fea2-4af0-90d2-e8d9ac53a138> lazy-loaded attributes: flavor,pci_requests {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 947.650995] env[61905]: DEBUG nova.network.neutron [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 947.653693] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.653954] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.654189] env[61905]: DEBUG nova.compute.manager [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Going to confirm migration 1 {{(pid=61905) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 947.670121] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 60%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.676630] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.677441] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.793s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.679822] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.803s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.680107] env[61905]: DEBUG nova.objects.instance [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lazy-loading 'resources' on Instance uuid e50cac29-797e-44a2-aafc-868e45ffd9cc {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.702566] env[61905]: INFO nova.scheduler.client.report [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted allocations for instance 4eba4203-0e35-4c56-b24f-3ac47a7a8b83 [ 947.758109] env[61905]: DEBUG nova.policy [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ff25da762d5421b9f1e24e4bcead22f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8cd0317a9e0e4f1d86c49a82e8ffbaa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.822571] env[61905]: INFO nova.network.neutron [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
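The Acquiring/Acquired/Releasing triplets throughout these records (for example around "refresh_cache-1502df44-...") are oslo.concurrency's lockutils at work; Nova uses it both as a decorator and as a context manager. A minimal sketch of the two forms, with lock names borrowed from the records above and placeholder bodies:

from oslo_concurrency import lockutils

# Decorator form: calls sharing the lock name are serialized, which
# yields the "waited N.NNNs" / "held N.NNNs" bookkeeping seen above.
@lockutils.synchronized('1502df44-9166-4ce8-9117-a57e7be2d299-events')
def pop_instance_event():
    pass  # critical section

# Context-manager form, as used around the network info cache refresh:
with lockutils.lock('refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299'):
    pass  # rebuild the instance's network info cache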
[ 947.822957] env[61905]: DEBUG nova.network.neutron [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [{"id": "7c93f7f3-4702-4071-8e42-c0627b146af5", "address": "fa:16:3e:8a:1d:46", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c93f7f3-47", "ovs_interfaceid": "7c93f7f3-4702-4071-8e42-c0627b146af5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.853675] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52177e4e-85d9-9b50-cf79-135233af11a2, 'name': SearchDatastore_Task, 'duration_secs': 0.017986} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.853961] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.854231] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.854469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.854621] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.854795] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.855106] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-820d4c42-f93c-4ed3-9a1c-ee0cc5a8ff51 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.871574] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.872734] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.872734] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c24e692-074c-4b62-8d88-c96114ee8995 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.881285] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 947.881285] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521a3a4d-a195-f3a3-9a01-3dc186475854" [ 947.881285] env[61905]: _type = "Task" [ 947.881285] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.892400] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521a3a4d-a195-f3a3-9a01-3dc186475854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.918038] env[61905]: DEBUG nova.compute.manager [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.918386] env[61905]: DEBUG nova.compute.manager [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing instance network info cache due to event network-changed-d1260d42-8ebd-4227-91b1-e34c80b3bdb0. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 947.918537] env[61905]: DEBUG oslo_concurrency.lockutils [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.918684] env[61905]: DEBUG oslo_concurrency.lockutils [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.918877] env[61905]: DEBUG nova.network.neutron [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.166034] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.181681] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.213532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2a01f40-3442-4b67-9d77-0ff293ba79bd tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "4eba4203-0e35-4c56-b24f-3ac47a7a8b83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.476s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.246447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.246447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.246447] env[61905]: DEBUG nova.network.neutron [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.246447] env[61905]: DEBUG nova.objects.instance [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lazy-loading 'info_cache' on Instance uuid b9400202-eb37-4c75-bbf3-807edb7bc16f {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.327430] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-0f7ccb34-cb14-4b21-ae61-b066427d400e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.396426] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]521a3a4d-a195-f3a3-9a01-3dc186475854, 'name': SearchDatastore_Task, 'duration_secs': 0.030463} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.398732] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e04c60-092e-4d03-9e7d-66a7d1b53dd7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.401573] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30c3540-9a0b-4a08-a309-e1de8c5f54eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.408306] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 948.408306] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bd691f-5a79-15c0-f4ed-12c456824454" [ 948.408306] env[61905]: _type = "Task" [ 948.408306] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.414240] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f24091-e5f5-400f-b717-029764978f85 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.419657] env[61905]: DEBUG nova.network.neutron [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.455401] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6c901a-1511-4a88-bd68-bc6ae77c0060 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.458420] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 
tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bd691f-5a79-15c0-f4ed-12c456824454, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.469155] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7106dced-81d8-4f02-bb17-c4c9d09ef665 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.487652] env[61905]: DEBUG nova.compute.provider_tree [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.637983] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Volume attach. Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 948.638236] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290084', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'name': 'volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7b0db0a2-c990-4160-9be8-018239425114', 'attached_at': '', 'detached_at': '', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'serial': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 948.639108] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eebf117-6311-47eb-b8f5-e8386d0f8ff6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.659219] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac029ba-d2e2-408f-9487-0cdff590cfa9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.662097] env[61905]: DEBUG nova.network.neutron [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updated VIF entry in instance network info cache for port d1260d42-8ebd-4227-91b1-e34c80b3bdb0. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 948.662510] env[61905]: DEBUG nova.network.neutron [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.683480] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.690571] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2/volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.691700] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-facb9744-3663-49f4-893b-70defafa25f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.707220] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362764, 'name': ReconfigVM_Task, 'duration_secs': 2.267759} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.707779] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 30862de5-1cfa-494a-a81d-1215a3580339/30862de5-1cfa-494a-a81d-1215a3580339.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.708406] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49d5b3a6-6c7e-4055-9a42-d4e882dd2c67 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.713064] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 948.713064] env[61905]: value = "task-1362766" [ 948.713064] env[61905]: _type = "Task" [ 948.713064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.716896] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 948.716896] env[61905]: value = "task-1362767" [ 948.716896] env[61905]: _type = "Task" [ 948.716896] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.725675] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362766, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.728422] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362767, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.833421] env[61905]: DEBUG oslo_concurrency.lockutils [None req-826e6a52-d89e-4780-8ebe-6a3cd3f8bcbe tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-0f7ccb34-cb14-4b21-ae61-b066427d400e-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.036s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.922830] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52bd691f-5a79-15c0-f4ed-12c456824454, 'name': SearchDatastore_Task, 'duration_secs': 0.027856} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.923106] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.923366] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.923864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.925761] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7cee371-67ff-49f1-8abb-09d83b766b77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.934060] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 948.934060] env[61905]: value = "task-1362768" [ 948.934060] env[61905]: _type = "Task" [ 948.934060] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.943085] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362768, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.957380] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='bffdb36aa6aaf3f389c4dc0ff2f11e97',container_format='bare',created_at=2024-10-25T05:20:27Z,direct_url=,disk_format='vmdk',id=af6a522f-8c87-46b5-bf21-04939866f8ef,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2141046355-shelved',owner='a78ffb1a94ca4220a39c68529eb5693d',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-10-25T05:20:43Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 948.957641] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 948.957819] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.958036] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 948.958224] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.958380] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 948.958586] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 948.958748] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 948.958925] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 948.959102] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 948.959276] env[61905]: DEBUG nova.virt.hardware [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 948.960111] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52161006-b542-47b1-912f-520a84425215 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.969169] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111bbdbe-1db7-4c5e-9a83-27546ed0f49a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.985226] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:2c:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 948.992916] env[61905]: DEBUG oslo.service.loopingcall [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.993872] env[61905]: DEBUG nova.scheduler.client.report [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.997210] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 948.997386] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b421618-8a61-4112-8716-59f143520226 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.013981] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.334s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.016073] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.083s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.016275] env[61905]: DEBUG nova.objects.instance [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'resources' on Instance uuid 4b1723a2-94a2-4070-9b47-85c9c8169137 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.026154] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.026154] env[61905]: value = "task-1362769" [ 949.026154] env[61905]: _type = "Task" [ 949.026154] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.036035] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362769, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.036972] env[61905]: INFO nova.scheduler.client.report [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Deleted allocations for instance e50cac29-797e-44a2-aafc-868e45ffd9cc [ 949.166830] env[61905]: DEBUG oslo_concurrency.lockutils [req-0885fa97-a77a-47ef-8483-05aaf42634de req-c0a99f71-3fda-439a-a811-56f39668bdeb service nova] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.167276] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362762, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.094702} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.167882] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d/4ef923b1-ffb9-4ef5-ada7-d90dba2c3a6d.vmdk to [datastore1] 26375621-b272-4243-95bd-5cf5b946cec4/26375621-b272-4243-95bd-5cf5b946cec4.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.168700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad87f97-cb69-4536-a4a3-e20936819d13 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.192592] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 26375621-b272-4243-95bd-5cf5b946cec4/26375621-b272-4243-95bd-5cf5b946cec4.vmdk or device None with type streamOptimized {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.192951] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b272eae2-da4c-4f56-a7ad-ff98976f960a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.218703] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 949.218703] env[61905]: value = "task-1362770" [ 949.218703] env[61905]: _type = "Task" [ 949.218703] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.228286] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362766, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.234643] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362770, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.237800] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362767, 'name': Rename_Task, 'duration_secs': 0.518489} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.238169] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.238454] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4198d0b4-5707-45a3-afba-6e33d0e3d973 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.246810] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 949.246810] env[61905]: value = "task-1362771" [ 949.246810] env[61905]: _type = "Task" [ 949.246810] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.262943] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.294031] env[61905]: DEBUG nova.network.neutron [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Successfully updated port: 1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.446205] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362768, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.520062] env[61905]: DEBUG nova.objects.instance [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'numa_topology' on Instance uuid 4b1723a2-94a2-4070-9b47-85c9c8169137 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.523151] env[61905]: DEBUG nova.network.neutron [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [{"id": "3802415e-d978-40f5-8265-2e03cbdd0814", "address": "fa:16:3e:3c:3e:e6", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3802415e-d9", "ovs_interfaceid": "3802415e-d978-40f5-8265-2e03cbdd0814", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.537658] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362769, 'name': CreateVM_Task, 'duration_secs': 0.410917} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.537888] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 949.538610] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.538810] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.539240] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.539522] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1004b362-83a5-44d3-b9a6-82e5f867c52b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.545593] env[61905]: DEBUG oslo_concurrency.lockutils [None req-60cfeb2a-cc17-4532-84b4-39bfe45db732 tempest-MultipleCreateTestJSON-2064647044 tempest-MultipleCreateTestJSON-2064647044-project-member] Lock "e50cac29-797e-44a2-aafc-868e45ffd9cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.732s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.548400] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 949.548400] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527c461f-0a2b-06c9-d4ac-46a27e006eed" [ 949.548400] env[61905]: _type = "Task" [ 949.548400] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.563612] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527c461f-0a2b-06c9-d4ac-46a27e006eed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.598616] env[61905]: DEBUG nova.compute.manager [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.598892] env[61905]: DEBUG nova.compute.manager [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing instance network info cache due to event network-changed-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 949.599192] env[61905]: DEBUG oslo_concurrency.lockutils [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] Acquiring lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.599369] env[61905]: DEBUG oslo_concurrency.lockutils [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] Acquired lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.599616] env[61905]: DEBUG nova.network.neutron [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Refreshing network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.725103] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.735184] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362770, 'name': ReconfigVM_Task, 'duration_secs': 0.342716} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.735598] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 26375621-b272-4243-95bd-5cf5b946cec4/26375621-b272-4243-95bd-5cf5b946cec4.vmdk or device None with type streamOptimized {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.736406] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f219e42-b395-4063-bd21-965aaf100dd0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.746583] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 949.746583] env[61905]: value = "task-1362772" [ 949.746583] env[61905]: _type = "Task" [ 949.746583] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.760518] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362771, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.764033] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362772, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.796245] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.796498] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.796814] env[61905]: DEBUG nova.network.neutron [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.949898] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362768, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.022890] env[61905]: DEBUG nova.objects.base [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Object Instance<4b1723a2-94a2-4070-9b47-85c9c8169137> lazy-loaded attributes: resources,numa_topology {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 950.026395] env[61905]: DEBUG nova.compute.manager [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.026636] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.026838] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.027016] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.027190] env[61905]: DEBUG nova.compute.manager [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] No waiting events found dispatching network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 950.027360] env[61905]: WARNING nova.compute.manager [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received unexpected event network-vif-plugged-1a2f5768-0301-4ff2-a1ae-e02fe03be64b for instance with vm_state active and task_state None. [ 950.027531] env[61905]: DEBUG nova.compute.manager [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-changed-1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.027727] env[61905]: DEBUG nova.compute.manager [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing instance network info cache due to event network-changed-1a2f5768-0301-4ff2-a1ae-e02fe03be64b. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 950.027909] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.028857] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-b9400202-eb37-4c75-bbf3-807edb7bc16f" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.029096] env[61905]: DEBUG nova.objects.instance [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lazy-loading 'migration_context' on Instance uuid b9400202-eb37-4c75-bbf3-807edb7bc16f {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.063978] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.064271] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Processing image af6a522f-8c87-46b5-bf21-04939866f8ef {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.065036] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.065036] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.065036] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.065303] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2de46eb1-2664-4660-80ca-801fc81905e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.080588] env[61905]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.080777] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.081572] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb709c39-a79b-41e7-84c9-04f28412e4e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.088179] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 950.088179] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ca3aa3-8f8f-0c22-fbef-374bc8ae8328" [ 950.088179] env[61905]: _type = "Task" [ 950.088179] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.109102] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Preparing fetch location {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 950.109376] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Fetch image to [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50/OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50.vmdk {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 950.109652] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Downloading stream optimized image af6a522f-8c87-46b5-bf21-04939866f8ef to [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50/OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50.vmdk on the data store datastore2 as vApp {{(pid=61905) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 950.109848] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Downloading image file data af6a522f-8c87-46b5-bf21-04939866f8ef to the ESX as VM named 'OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50' {{(pid=61905) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 950.209885] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 
tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 950.209885] env[61905]: value = "resgroup-9" [ 950.209885] env[61905]: _type = "ResourcePool" [ 950.209885] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 950.210216] env[61905]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d6c632cc-02b1-43a8-9ab6-da2366f8139d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.238924] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362766, 'name': ReconfigVM_Task, 'duration_secs': 1.359535} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.240994] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfigured VM instance instance-00000051 to attach disk [datastore2] volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2/volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 950.245792] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lease: (returnval){ [ 950.245792] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 950.245792] env[61905]: _type = "HttpNfcLease" [ 950.245792] env[61905]: } obtained for vApp import into resource pool (val){ [ 950.245792] env[61905]: value = "resgroup-9" [ 950.245792] env[61905]: _type = "ResourcePool" [ 950.245792] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 950.246094] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the lease: (returnval){ [ 950.246094] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 950.246094] env[61905]: _type = "HttpNfcLease" [ 950.246094] env[61905]: } to be ready. 
{{(pid=61905) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 950.246687] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9557b8b-352f-44aa-997f-b0d0f02ea912 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.249351] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcae0bf0-c479-4cfc-886f-0d753dd99b66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.275611] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1ac78d-cdd9-491c-a86a-0758acc340b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.285735] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362772, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.286029] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 950.286029] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 950.286029] env[61905]: _type = "HttpNfcLease" [ 950.286029] env[61905]: } is initializing. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 950.288150] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 950.288150] env[61905]: value = "task-1362774" [ 950.288150] env[61905]: _type = "Task" [ 950.288150] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.319126] env[61905]: DEBUG oslo_vmware.api [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362771, 'name': PowerOnVM_Task, 'duration_secs': 0.890517} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.324777] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.325022] env[61905]: INFO nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Took 12.00 seconds to spawn the instance on the hypervisor. 
[ 950.325682] env[61905]: DEBUG nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 950.326102] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774ef476-037d-42fb-b35b-0bb1ad26fb94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.332555] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62df7363-6ef2-4f73-8319-69159fc16954 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.336398] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.347252] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1e49c5-d0d5-4fff-953f-28e94988756e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.362372] env[61905]: DEBUG nova.compute.provider_tree [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.373118] env[61905]: WARNING nova.network.neutron [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] dd057eb4-847f-4d06-88bd-a25b4fd8db1f already exists in list: networks containing: ['dd057eb4-847f-4d06-88bd-a25b4fd8db1f']. ignoring it [ 950.455491] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362768, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.026091} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.456253] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.459230] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.459230] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6895eaa0-8371-42f4-882a-65a26c186164 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.466917] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 950.466917] env[61905]: value = "task-1362775" [ 950.466917] env[61905]: _type = "Task" [ 950.466917] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.483046] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362775, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.535023] env[61905]: DEBUG nova.objects.base [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 950.535023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2978169f-c892-48d8-be7a-8f4ee4686c9f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.564297] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eaf6ca5-4a19-4a89-bcbf-ed0e8d621e28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.573211] env[61905]: DEBUG oslo_vmware.api [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 950.573211] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528d172f-84b6-e680-19de-849db7139ce5" [ 950.573211] env[61905]: _type = "Task" [ 950.573211] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.581135] env[61905]: DEBUG oslo_vmware.api [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528d172f-84b6-e680-19de-849db7139ce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.736785] env[61905]: DEBUG nova.network.neutron [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "address": "fa:16:3e:be:37:76", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a2f5768-03", "ovs_interfaceid": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.773467] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 950.773467] env[61905]: value = 
"session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 950.773467] env[61905]: _type = "HttpNfcLease" [ 950.773467] env[61905]: } is initializing. {{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 950.779958] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362772, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.798772] env[61905]: DEBUG oslo_vmware.api [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362774, 'name': ReconfigVM_Task, 'duration_secs': 0.217312} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.799813] env[61905]: DEBUG nova.network.neutron [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updated VIF entry in instance network info cache for port 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.800241] env[61905]: DEBUG nova.network.neutron [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [{"id": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "address": "fa:16:3e:db:2c:3c", "network": {"id": "69349adb-d2dc-410e-9be8-f675ede64e97", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-78486366-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a78ffb1a94ca4220a39c68529eb5693d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767ea9d8-de", "ovs_interfaceid": "767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.801959] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290084', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'name': 'volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 
'reserved', 'instance': '7b0db0a2-c990-4160-9be8-018239425114', 'attached_at': '', 'detached_at': '', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'serial': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 950.855204] env[61905]: INFO nova.compute.manager [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Took 18.52 seconds to build instance. [ 950.865309] env[61905]: DEBUG nova.scheduler.client.report [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 950.978034] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.492282} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.978431] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.979253] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e21acf-ab5e-4de0-9691-93a4d0ab4554 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.001204] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.001204] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a21db8ec-6e20-43c6-98d6-1dad84ae57a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.027169] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 951.027169] env[61905]: value = "task-1362776" [ 951.027169] env[61905]: _type = "Task" [ 951.027169] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.037749] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362776, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.082714] env[61905]: DEBUG oslo_vmware.api [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528d172f-84b6-e680-19de-849db7139ce5, 'name': SearchDatastore_Task, 'duration_secs': 0.008871} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.083063] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.244017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.245029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.245380] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.245554] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.245741] env[61905]: DEBUG nova.network.neutron [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Refreshing network info cache for port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.247530] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b774536d-d11d-4b3a-b34f-b71af4361143 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.269508] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 
tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.269632] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.269786] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.269988] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.270175] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.270367] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.270537] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.270696] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.270927] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.271640] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 
tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.271855] env[61905]: DEBUG nova.virt.hardware [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.278191] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfiguring VM to attach interface {{(pid=61905) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 951.284999] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82b63923-fd42-4372-bd46-98ec869335c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.302995] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362772, 'name': Rename_Task, 'duration_secs': 1.197164} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.305456] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.305954] env[61905]: DEBUG oslo_concurrency.lockutils [req-5694f975-d600-4af5-be88-c4f5a3f56e4b req-56f9e357-bdf8-4c83-92d6-d89a0c0726b7 service nova] Releasing lock "refresh_cache-1502df44-9166-4ce8-9117-a57e7be2d299" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.306437] env[61905]: DEBUG oslo_vmware.api [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 951.306437] env[61905]: value = "task-1362777" [ 951.306437] env[61905]: _type = "Task" [ 951.306437] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.306624] env[61905]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 951.306624] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 951.306624] env[61905]: _type = "HttpNfcLease" [ 951.306624] env[61905]: } is ready. 
{{(pid=61905) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 951.306792] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f01bbfea-357a-4d81-a573-2e3745d891ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.310276] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 951.310276] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527601b0-19de-9c33-b387-112452423459" [ 951.310276] env[61905]: _type = "HttpNfcLease" [ 951.310276] env[61905]: }. {{(pid=61905) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 951.314389] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b20a34d-f69c-440a-ab59-fedbeea37c5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.324171] env[61905]: DEBUG oslo_vmware.api [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362777, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.330614] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 951.330614] env[61905]: value = "task-1362778" [ 951.330614] env[61905]: _type = "Task" [ 951.330614] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.331019] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk from lease info. {{(pid=61905) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 951.331253] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk. 
{{(pid=61905) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 951.398713] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d8f25e6a-cd86-4407-a72d-a82f661657f2 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.075s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.399463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.383s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.410142] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.327s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.411824] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362778, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.416479] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9d85e9da-b02c-41db-8989-0525b108c914 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.539522] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362776, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.824951] env[61905]: DEBUG oslo_vmware.api [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362777, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.845784] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362778, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.906216] env[61905]: DEBUG nova.objects.instance [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.925414] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5867a994-6308-4f64-80cc-ef0bbfffd684 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.807s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.926622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 7.108s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.930358] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.930605] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.930806] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.934318] env[61905]: INFO nova.compute.manager [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Terminating instance [ 951.939585] env[61905]: DEBUG nova.compute.manager [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 951.939763] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.940154] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d07a579-79ac-4bca-a102-ddd8af1161c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.954344] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074b19cb-6570-452e-a4dc-d69d7a8d19ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.005403] env[61905]: WARNING nova.virt.vmwareapi.vmops [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4b1723a2-94a2-4070-9b47-85c9c8169137 could not be found. [ 952.006200] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.006571] env[61905]: INFO nova.compute.manager [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Took 0.07 seconds to destroy the instance on the hypervisor. [ 952.007049] env[61905]: DEBUG oslo.service.loopingcall [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 952.017884] env[61905]: DEBUG nova.compute.manager [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 952.017884] env[61905]: DEBUG nova.network.neutron [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.040342] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362776, 'name': ReconfigVM_Task, 'duration_secs': 0.613458} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.046418] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 7bb17b60-268a-4670-beb8-df5232a698ae/7bb17b60-268a-4670-beb8-df5232a698ae.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 952.047446] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9684493-fe1d-4516-a94c-f2a459f071f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.057251] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 952.057251] env[61905]: value = "task-1362779" [ 952.057251] env[61905]: _type = "Task" [ 952.057251] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.076319] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362779, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.091433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "30862de5-1cfa-494a-a81d-1215a3580339" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.091989] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.092624] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "30862de5-1cfa-494a-a81d-1215a3580339-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.092719] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.093192] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.105982] env[61905]: INFO nova.compute.manager [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Terminating instance [ 952.108823] env[61905]: DEBUG nova.compute.manager [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.109526] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.110210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd38c86-6595-442e-b67e-d6e390949463 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.123071] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.123071] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b26ea10c-1796-4da5-9256-6d421757b7a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.131259] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 952.131259] env[61905]: value = "task-1362780" [ 952.131259] env[61905]: _type = "Task" [ 952.131259] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.146599] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362780, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.170331] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.177513] env[61905]: DEBUG nova.network.neutron [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updated VIF entry in instance network info cache for port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.177998] env[61905]: DEBUG nova.network.neutron [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "address": "fa:16:3e:be:37:76", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a2f5768-03", "ovs_interfaceid": "1a2f5768-0301-4ff2-a1ae-e02fe03be64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.247491] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d32b6ab-28eb-4d0e-81f3-f4ec8bc28069 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.260088] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00471682-5d7c-4a09-b4b3-1f068e576389 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.307916] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84a97c5-a9c4-4121-a501-f08ad34c3caa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.324014] env[61905]: DEBUG oslo_vmware.api [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362777, 'name': ReconfigVM_Task, 'duration_secs': 0.892135} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.326401] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.326678] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfigured VM to attach interface {{(pid=61905) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 952.333148] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8878e29-4839-4f8f-9be3-7d3646416aa4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.361216] env[61905]: DEBUG oslo_vmware.api [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362778, 'name': PowerOnVM_Task, 'duration_secs': 0.551962} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.361971] env[61905]: DEBUG nova.compute.provider_tree [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.367622] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.367874] env[61905]: INFO nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Took 16.38 seconds to spawn the instance on the hypervisor. [ 952.368083] env[61905]: DEBUG nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.370313] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35617668-c4bf-4adf-a30e-c2b7ef30da31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.414469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3ec3b94f-0310-41d6-9894-31c2d1ff0b1d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 8.903s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.415564] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.246s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.415746] env[61905]: DEBUG nova.compute.manager [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 952.416994] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2298fa-99f4-414e-9bdf-19c5a9f1844b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.426557] env[61905]: DEBUG nova.compute.manager [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Stopping instance; current vm_state: active, current task_state: powering-off, current DB 
power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 952.428823] env[61905]: DEBUG nova.objects.instance [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.571555] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362779, 'name': Rename_Task, 'duration_secs': 0.220712} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.572010] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.573551] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb4e4f37-7e31-4eef-bb11-603319d1fe54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.582407] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 952.582407] env[61905]: value = "task-1362781" [ 952.582407] env[61905]: _type = "Task" [ 952.582407] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.592188] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362781, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.645139] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362780, 'name': PowerOffVM_Task, 'duration_secs': 0.247842} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.647173] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.647366] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.647658] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-904e9027-a373-433b-8859-322df1c41284 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.681322] env[61905]: DEBUG oslo_concurrency.lockutils [req-bf1566e2-0f59-4f9e-9a4d-034365c2c403 req-1304501c-d05c-4d0e-8e81-91349ed20aa5 service nova] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.724804] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.725113] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.725565] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleting the datastore file [datastore1] 30862de5-1cfa-494a-a81d-1215a3580339 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.725951] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97d2133b-5761-4f20-8ba4-8940a22a6bfb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.736489] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 952.736489] env[61905]: value = "task-1362783" [ 952.736489] env[61905]: _type = "Task" [ 952.736489] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.747904] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362783, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.786217] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Completed reading data from the image iterator. {{(pid=61905) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 952.786652] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 952.788171] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee6f0d0-de4a-4a4b-8cf4-60329dfbe1a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.797574] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk is in state: ready. {{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 952.797826] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk. 
{{(pid=61905) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 952.798120] env[61905]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c3a1bb72-23b3-49e8-bbfc-fb5f2dc96e06 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.839687] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e731718c-963b-49fc-be61-6287c0b802a6 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 6.330s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.871626] env[61905]: DEBUG nova.scheduler.client.report [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.892962] env[61905]: DEBUG nova.network.neutron [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.898622] env[61905]: INFO nova.compute.manager [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Took 28.91 seconds to build instance. [ 952.933881] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.934211] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81fdcd17-63dc-48bd-ac55-fa7e05231496 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.943151] env[61905]: DEBUG oslo_vmware.api [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 952.943151] env[61905]: value = "task-1362784" [ 952.943151] env[61905]: _type = "Task" [ 952.943151] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.952561] env[61905]: DEBUG oslo_vmware.api [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362784, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.090038] env[61905]: DEBUG oslo_vmware.rw_handles [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52599f1a-be33-05e5-dd9c-6fe08d144b23/disk-0.vmdk. {{(pid=61905) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 953.090236] env[61905]: INFO nova.virt.vmwareapi.images [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Downloaded image file data af6a522f-8c87-46b5-bf21-04939866f8ef [ 953.095426] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83de3adf-6b3f-495b-ac25-f25f6402ad78 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.099065] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362781, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.115667] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-708010a4-f372-4481-ae44-c0de464ac489 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.150112] env[61905]: INFO nova.virt.vmwareapi.images [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] The imported VM was unregistered [ 953.152650] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Caching image {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 953.152926] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Creating directory with path [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.153245] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccc275fc-4f33-41fc-a9d6-c135a13fe785 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.176376] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Created directory with path [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.176571] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50/OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50.vmdk to [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk. {{(pid=61905) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 953.176812] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c8856509-ecba-47ce-97ab-076e0d049bd6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.186529] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 953.186529] env[61905]: value = "task-1362786" [ 953.186529] env[61905]: _type = "Task" [ 953.186529] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.194669] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.247240] env[61905]: DEBUG oslo_vmware.api [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280649} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.247240] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.247240] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.247518] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.247518] env[61905]: INFO nova.compute.manager [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Took 1.14 seconds to destroy the instance on the hypervisor. 
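The stretch of entries ending here traces the vmwareapi driver's full teardown of instance 30862de5-1cfa-494a-a81d-1215a3580339: PowerOffVM_Task (task-1362780) completes, the VM is unregistered via a plain VirtualMachine.UnregisterVM call with no task to poll, and DeleteDatastoreFile_Task (task-1362783) removes the instance directory, after which the manager logs "Instance destroyed" and hands off to network deallocation. A minimal runnable sketch of that ordering follows; the FakeVSphereSession recorder and the destroy_vm helper are hypothetical stand-ins for illustration only, not the actual nova.virt.vmwareapi or oslo.vmware APIs:

    from dataclasses import dataclass, field

    @dataclass
    class FakeVSphereSession:
        # Hypothetical stand-in that records invoked vSphere operations so the
        # sketch runs without a vCenter; real code goes through oslo.vmware.
        calls: list = field(default_factory=list)

        def invoke(self, op, **kwargs):
            self.calls.append((op, kwargs))
            return f"task-{len(self.calls)}"  # pretend each call returns a task ref

        def wait_for_task(self, task):
            # Real code polls the task until 'success' or raises on 'error';
            # that polling produces the "progress is N%" entries seen above.
            self.calls.append(("wait_for_task", {"task": task}))

    def destroy_vm(session, vm_ref, instance_dir):
        # Mirror the logged order: power off, unregister, delete datastore files.
        session.wait_for_task(session.invoke("PowerOffVM_Task", vm=vm_ref))
        session.invoke("UnregisterVM", vm=vm_ref)  # synchronous, nothing to poll
        session.wait_for_task(
            session.invoke("DeleteDatastoreFile_Task", name=instance_dir))

    if __name__ == "__main__":
        s = FakeVSphereSession()
        destroy_vm(s, "vm-30862de5",
                   "[datastore1] 30862de5-1cfa-494a-a81d-1215a3580339")
        for call in s.calls:
            print(call)

The detail worth noting in the log is that only the power-off and file-deletion steps are long-running vCenter tasks that get polled; unregistering returns immediately, which is why no task id appears between task-1362780 and task-1362783 for this instance.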
[ 953.247681] env[61905]: DEBUG oslo.service.loopingcall [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.247885] env[61905]: DEBUG nova.compute.manager [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 953.247987] env[61905]: DEBUG nova.network.neutron [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.396797] env[61905]: INFO nova.compute.manager [-] [instance: 4b1723a2-94a2-4070-9b47-85c9c8169137] Took 1.38 seconds to deallocate network for instance. [ 953.402588] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f27c8dc-f907-42d4-b523-016d9b484c6a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 30.428s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.454587] env[61905]: DEBUG oslo_vmware.api [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362784, 'name': PowerOffVM_Task, 'duration_secs': 0.505124} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.454876] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.455068] env[61905]: DEBUG nova.compute.manager [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.456126] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095099e5-7ad0-41ae-b620-228d15128f26 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.596740] env[61905]: DEBUG oslo_vmware.api [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362781, 'name': PowerOnVM_Task, 'duration_secs': 0.844347} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.597186] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.597531] env[61905]: DEBUG nova.compute.manager [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.598773] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c376798-7ebb-498b-97bd-33aba31737c1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.700265] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.881923] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.472s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.970880] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6aae18a2-3f09-4360-8c97-4f2d018b5b93 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.555s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.040264] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "26375621-b272-4243-95bd-5cf5b946cec4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.040443] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.040632] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "26375621-b272-4243-95bd-5cf5b946cec4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.040832] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.041016] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.044131] env[61905]: INFO nova.compute.manager [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Terminating instance [ 954.046275] env[61905]: DEBUG nova.compute.manager [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 954.046770] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 954.047773] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3d395c-9fbd-4d11-a706-573b1ee08ea2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.058019] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.058714] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-057ad3bb-b4b4-4e99-b504-6e8c5480bb27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.069364] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 954.069364] env[61905]: value = "task-1362787" [ 954.069364] env[61905]: _type = "Task" [ 954.069364] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.080977] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362787, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.122141] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.122141] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.122141] env[61905]: DEBUG nova.objects.instance [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 954.203875] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.446833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.446833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.446833] env[61905]: DEBUG nova.network.neutron [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.446833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-55babb93-b153-4570-8349-43eaeae53902 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "4b1723a2-94a2-4070-9b47-85c9c8169137" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.509s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.484189] env[61905]: INFO nova.scheduler.client.report [None req-f6d75298-b1b3-456d-802a-12360c2e2441 
tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocation for migration f394d276-5c21-48cd-8c02-d666e52f4cdd [ 954.589489] env[61905]: DEBUG nova.compute.manager [req-5c94705c-8e64-472a-a047-a1da181a118d req-cb3e4163-977b-42be-bb3e-6e03f7e834d6 service nova] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Received event network-vif-deleted-3892e570-77e4-46de-8f2b-a098cd36d007 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.596214] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362787, 'name': PowerOffVM_Task, 'duration_secs': 0.262} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.596975] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.597703] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.598627] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f28f4e4d-6ab7-4d31-ad13-46be770d38a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.675955] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.675955] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.675955] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleting the datastore file [datastore1] 26375621-b272-4243-95bd-5cf5b946cec4 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.675955] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-731a787c-c568-46cb-b722-220a4d00b56c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.686299] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 954.686299] env[61905]: value = "task-1362789" [ 954.686299] env[61905]: _type = "Task" [ 954.686299] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.705055] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.709822] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.864578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.864578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.864578] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e205663-b5e8-4631-9140-cf9bddcaa2ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.869320] env[61905]: INFO nova.compute.manager [-] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Took 1.62 seconds to deallocate network for instance. [ 954.895739] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9bb77c-5d58-4a5c-9c96-d8ebee2a316b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.925760] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfiguring VM to detach interface {{(pid=61905) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 954.927195] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-791b00d7-c2f8-4e55-b93e-d3ceeedd0eff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.953845] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 954.953845] env[61905]: value = "task-1362790" [ 954.953845] env[61905]: _type = "Task" [ 954.953845] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.964670] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.990589] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f6d75298-b1b3-456d-802a-12360c2e2441 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 7.336s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.062549] env[61905]: DEBUG nova.objects.instance [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.137037] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9cb3416b-e98e-448f-8a93-41d5d5606836 tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.166849] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "7bb17b60-268a-4670-beb8-df5232a698ae" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.167229] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.167366] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "7bb17b60-268a-4670-beb8-df5232a698ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.167558] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.168327] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.170071] env[61905]: INFO nova.compute.manager [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Terminating instance [ 955.172119] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "refresh_cache-7bb17b60-268a-4670-beb8-df5232a698ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.172310] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquired lock "refresh_cache-7bb17b60-268a-4670-beb8-df5232a698ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.172484] env[61905]: DEBUG nova.network.neutron [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.204041] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.208856] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.394109] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.394389] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.394732] env[61905]: DEBUG nova.objects.instance [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'resources' on Instance uuid 30862de5-1cfa-494a-a81d-1215a3580339 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.476361] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.569426] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.569622] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.569863] env[61905]: DEBUG nova.network.neutron [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.571196] env[61905]: DEBUG nova.objects.instance [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'info_cache' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.695985] env[61905]: DEBUG nova.network.neutron [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.708809] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362789, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.712455] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.823586] env[61905]: DEBUG nova.network.neutron [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.969934] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.016815] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.017045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.074133] env[61905]: DEBUG nova.objects.base [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Object Instance<7b0db0a2-c990-4160-9be8-018239425114> lazy-loaded attributes: flavor,info_cache {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 956.119217] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249c22ce-dc12-4ebd-ad5c-dea167701de7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.128686] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22eb9bd-d612-429c-9e20-50dd026be03b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.166502] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-bfdd28fe-1824-469c-b2e1-0d3f7441b0da {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.176760] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914a78ab-cdd1-4750-a7ba-d7c4ddc2e702 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.193026] env[61905]: DEBUG nova.compute.provider_tree [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.204748] env[61905]: DEBUG oslo_vmware.api [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362789, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.497065} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.205463] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 956.205703] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 956.205889] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 956.206081] env[61905]: INFO nova.compute.manager [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Took 2.16 seconds to destroy the instance on the hypervisor. [ 956.206388] env[61905]: DEBUG oslo.service.loopingcall [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.206522] env[61905]: DEBUG nova.compute.manager [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 956.206617] env[61905]: DEBUG nova.network.neutron [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 956.211553] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.327095] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Releasing lock "refresh_cache-7bb17b60-268a-4670-beb8-df5232a698ae" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.327095] env[61905]: DEBUG nova.compute.manager [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 956.327095] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.327433] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95756428-1d28-4228-b040-54fbf807a112 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.336463] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.336757] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-847670bf-a858-450b-a973-54f2300f5e0c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.344808] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 956.344808] env[61905]: value = "task-1362791" [ 956.344808] env[61905]: _type = "Task" [ 956.344808] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.354030] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.467100] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.520649] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 956.700041] env[61905]: DEBUG nova.scheduler.client.report [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.716179] env[61905]: DEBUG nova.compute.manager [req-94dcce97-5b49-410f-aafc-d82d3c2b8cf4 req-899c0a5b-b8e1-4895-bb2d-84b2c81d5ce4 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Received event network-vif-deleted-262f0a20-88eb-4d13-a3d7-3033ab16713f {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 956.716397] env[61905]: INFO nova.compute.manager [req-94dcce97-5b49-410f-aafc-d82d3c2b8cf4 req-899c0a5b-b8e1-4895-bb2d-84b2c81d5ce4 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Neutron deleted interface 262f0a20-88eb-4d13-a3d7-3033ab16713f; detaching it from the instance and deleting it from the info cache [ 956.716722] env[61905]: DEBUG nova.network.neutron [req-94dcce97-5b49-410f-aafc-d82d3c2b8cf4 req-899c0a5b-b8e1-4895-bb2d-84b2c81d5ce4 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.722865] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362786, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.090576} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.723138] env[61905]: INFO nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50/OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50.vmdk to [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk. [ 956.723329] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Cleaning up location [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 956.723495] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c94f2b63-b226-465c-8db3-a269337f2c50 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.723754] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99bac05a-0ee1-488e-8037-790b3b1192fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.732699] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 956.732699] env[61905]: value = "task-1362792" [ 956.732699] env[61905]: _type = "Task" [ 956.732699] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.744337] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362792, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.824177] env[61905]: DEBUG nova.network.neutron [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.859048] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362791, 'name': PowerOffVM_Task, 'duration_secs': 0.140941} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.859495] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.859847] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.860235] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-156cbd86-e973-4cf6-9f5f-a2e748665152 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.894638] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.894638] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.894638] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Deleting the datastore file [datastore2] 7bb17b60-268a-4670-beb8-df5232a698ae {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.894638] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-580d3435-0654-4944-b561-0251eba6a1f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.903617] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for the task: (returnval){ [ 956.903617] env[61905]: value = "task-1362794" [ 956.903617] env[61905]: _type = "Task" [ 956.903617] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.913891] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362794, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.968393] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.046056] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.189240] env[61905]: DEBUG nova.network.neutron [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.205236] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.811s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.208093] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.162s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.210297] env[61905]: INFO nova.compute.claims [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.218808] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f67c88e9-9814-4936-afc1-83cdbee3746e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.230458] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4e114b-dafb-49e3-a0f2-082e373a60b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.246575] env[61905]: INFO nova.scheduler.client.report [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 30862de5-1cfa-494a-a81d-1215a3580339 [ 957.271308] env[61905]: DEBUG nova.compute.manager [req-94dcce97-5b49-410f-aafc-d82d3c2b8cf4 req-899c0a5b-b8e1-4895-bb2d-84b2c81d5ce4 service nova] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Detach interface failed, port_id=262f0a20-88eb-4d13-a3d7-3033ab16713f, reason: Instance 26375621-b272-4243-95bd-5cf5b946cec4 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 957.274949] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362792, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037808} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.275223] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.275384] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.275670] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk to [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 957.275866] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54973433-eb32-4297-860d-96b98073c9f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.284824] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 957.284824] env[61905]: value = "task-1362795" [ 957.284824] env[61905]: _type = "Task" [ 957.284824] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.295207] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.295207] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.295422] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.295606] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.295773] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.297581] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.298097] env[61905]: INFO nova.compute.manager [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Terminating instance [ 957.300087] env[61905]: DEBUG nova.compute.manager [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Start destroying the instance on the hypervisor.
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 957.300362] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.301726] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6bf2d5-dd2a-456d-90d3-8d16043085d7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.310284] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.310677] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a725c4c2-9119-4092-a513-262f60a2ed49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.320019] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 957.320019] env[61905]: value = "task-1362796" [ 957.320019] env[61905]: _type = "Task" [ 957.320019] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.327470] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.329119] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362796, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.422412] env[61905]: DEBUG oslo_vmware.api [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Task: {'id': task-1362794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.114377} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.426087] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.426087] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.426087] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.426087] env[61905]: INFO nova.compute.manager [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Took 1.10 seconds to destroy the instance on the hypervisor. [ 957.426087] env[61905]: DEBUG oslo.service.loopingcall [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.426087] env[61905]: DEBUG nova.compute.manager [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 957.426087] env[61905]: DEBUG nova.network.neutron [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.441272] env[61905]: DEBUG nova.network.neutron [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.468253] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.691844] env[61905]: INFO nova.compute.manager [-] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Took 1.49 seconds to deallocate network for instance.
[ 957.757513] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c8c226be-7174-4b31-a561-05084613a499 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "30862de5-1cfa-494a-a81d-1215a3580339" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.665s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.797155] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.829021] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362796, 'name': PowerOffVM_Task, 'duration_secs': 0.214647} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.829366] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.829542] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.830900] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 957.831166] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecd6c99f-0501-4cc8-bb2e-977ec90375e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.832866] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90c135d4-e7ed-4524-bc67-33c2714de976 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.843864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.844040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166"
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.845953] env[61905]: DEBUG oslo_vmware.api [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 957.845953] env[61905]: value = "task-1362797" [ 957.845953] env[61905]: _type = "Task" [ 957.845953] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.856054] env[61905]: DEBUG oslo_vmware.api [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.944327] env[61905]: DEBUG nova.network.neutron [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.957865] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.958161] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.958382] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleting the datastore file [datastore2] b9400202-eb37-4c75-bbf3-807edb7bc16f {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.959082] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d35a511-6218-4d9e-92f3-8e633559aab4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.971032] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.972660] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 957.972660] env[61905]: value = "task-1362799" [ 957.972660] env[61905]: _type = "Task" [ 957.972660] env[61905]: } to complete.
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.982494] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.191804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "55a9190b-52f7-4bba-81b0-079e62537183" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.192166] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.192463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "55a9190b-52f7-4bba-81b0-079e62537183-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.192706] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.192918] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.195170] env[61905]: INFO nova.compute.manager [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Terminating instance [ 958.197202] env[61905]: DEBUG nova.compute.manager [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Start destroying the instance on the hypervisor.
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 958.197411] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 958.198290] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6d6bc4-c39b-4a00-9fa9-9f4da7d89ae6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.202644] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.207346] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 958.207772] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e74da92d-26f1-41c2-ad08-cd6d66388826 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.215790] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 958.215790] env[61905]: value = "task-1362800" [ 958.215790] env[61905]: _type = "Task" [ 958.215790] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.230440] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.296599] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.347636] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 958.363773] env[61905]: DEBUG oslo_vmware.api [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362797, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.408069] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4f69b1-500f-411e-baff-1485c75e2200 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.418012] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0b8347-09ef-409d-9760-43bc68998318 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.450848] env[61905]: INFO nova.compute.manager [-] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Took 1.03 seconds to deallocate network for instance. [ 958.453676] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86852f6-3f99-4e98-83f3-116e3597bcf3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.467138] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebd5564-bcf8-4f7e-a3a9-1c8ca902b511 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.474859] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.488063] env[61905]: DEBUG nova.compute.provider_tree [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.495791] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.729439] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.797053] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.829549] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.829856] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.864493] env[61905]: DEBUG oslo_vmware.api [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362797, 'name': PowerOnVM_Task, 'duration_secs': 0.930973} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.867070] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 958.867294] env[61905]: DEBUG nova.compute.manager [None req-4114f306-e524-4bcb-a9ae-acf900c5b4c4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 958.868568] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229572c4-9ff8-446c-b540-0627b884d276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.879248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.966597] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.971301] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.985708] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.992496] env[61905]: DEBUG nova.scheduler.client.report [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 959.231140] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362800, 'name': PowerOffVM_Task, 'duration_secs': 0.980242} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.231527] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.231615] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.231969] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c70f1d2d-a86a-455d-8dfd-fe2500a62beb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.298030] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.332844] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 959.472435] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.486809] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.497890] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.498547] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 959.501752] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.299s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.501752] env[61905]: DEBUG nova.objects.instance [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lazy-loading 'resources' on Instance uuid 26375621-b272-4243-95bd-5cf5b946cec4 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 959.759670] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 959.759938] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 959.760044] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleting the datastore file [datastore2] 55a9190b-52f7-4bba-81b0-079e62537183 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 959.760346] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a0515e4-bf5f-4d34-8ff8-2139607f37ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.769774] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 959.769774] env[61905]: value = "task-1362802" [ 959.769774] env[61905]: _type = "Task" [ 959.769774] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.777850] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.796583] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.857883] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.971018] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.986556] env[61905]: DEBUG oslo_vmware.api [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.844366} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.986804] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 959.987141] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 959.987373] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 959.987558] env[61905]: INFO nova.compute.manager [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Took 2.69 seconds to destroy the instance on the hypervisor. [ 959.987803] env[61905]: DEBUG oslo.service.loopingcall [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.987991] env[61905]: DEBUG nova.compute.manager [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 959.988100] env[61905]: DEBUG nova.network.neutron [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.004692] env[61905]: DEBUG nova.compute.utils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.006394] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 960.006394] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 960.055321] env[61905]: DEBUG nova.policy [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 960.171966] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f884aef-6414-44e4-92d6-0c44b3623df2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.181483] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7376dd3-c5c1-4789-8ce5-988e18eb47b0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.212468] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1af42e-1166-4015-b430-7eed6cc91a59 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.220747] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b57d5c-708a-4f85-aee8-b7f08a48d5c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.234663] env[61905]: DEBUG nova.compute.provider_tree [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.282839] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362802, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.293702] env[61905]: DEBUG nova.compute.manager [req-2eb86e66-e186-4f17-b38e-c29e9d1f7852 req-af5499bd-3717-4fa1-8216-22d993dc57f8 service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Received event network-vif-deleted-3802415e-d978-40f5-8265-2e03cbdd0814 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.293874] env[61905]: INFO nova.compute.manager [req-2eb86e66-e186-4f17-b38e-c29e9d1f7852 req-af5499bd-3717-4fa1-8216-22d993dc57f8 service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Neutron deleted interface 3802415e-d978-40f5-8265-2e03cbdd0814; detaching it from the instance and deleting it from the info cache [ 960.294009] env[61905]: DEBUG nova.network.neutron [req-2eb86e66-e186-4f17-b38e-c29e9d1f7852 req-af5499bd-3717-4fa1-8216-22d993dc57f8 service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.301717] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362795, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.530316} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.301974] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/af6a522f-8c87-46b5-bf21-04939866f8ef/af6a522f-8c87-46b5-bf21-04939866f8ef.vmdk to [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.302766] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff86694d-e6d6-4f4a-9e7d-b7fa5fdcf5cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.327573] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk or device None with type streamOptimized {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.328223] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9be446f1-57d4-4c60-b458-6425051d9fc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.354985] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 960.354985] env[61905]: value = "task-1362803" [ 960.354985] env[61905]: _type = "Task" [ 960.354985] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.358637] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Successfully created port: 356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.367334] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362803, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.471269] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.509175] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 960.737731] env[61905]: DEBUG nova.scheduler.client.report [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.765513] env[61905]: DEBUG nova.network.neutron [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.781803] env[61905]: DEBUG oslo_vmware.api [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.896063} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.782074] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.782274] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.782465] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.782624] env[61905]: INFO nova.compute.manager [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Took 2.59 seconds to destroy the instance on the hypervisor. [ 960.782870] env[61905]: DEBUG oslo.service.loopingcall [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.783077] env[61905]: DEBUG nova.compute.manager [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.783173] env[61905]: DEBUG nova.network.neutron [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 960.797209] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-591a3fb0-f166-41bd-adef-d2ceb51a69c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.807538] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41dc7c0-78fc-4eda-bed8-b8eaf7d6611e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.838783] env[61905]: DEBUG nova.compute.manager [req-2eb86e66-e186-4f17-b38e-c29e9d1f7852 req-af5499bd-3717-4fa1-8216-22d993dc57f8 service nova] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Detach interface failed, port_id=3802415e-d978-40f5-8265-2e03cbdd0814, reason: Instance b9400202-eb37-4c75-bbf3-807edb7bc16f could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 960.864909] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362803, 'name': ReconfigVM_Task, 'duration_secs': 0.282339} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.865212] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299/1502df44-9166-4ce8-9117-a57e7be2d299.vmdk or device None with type streamOptimized {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.865845] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c20ffb0b-31a2-445e-9560-72be8f7722c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.872641] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 960.872641] env[61905]: value = "task-1362804" [ 960.872641] env[61905]: _type = "Task" [ 960.872641] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.880930] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362804, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.972298] env[61905]: DEBUG oslo_vmware.api [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362790, 'name': ReconfigVM_Task, 'duration_secs': 5.787886} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.972772] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.973066] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Reconfigured VM to detach interface {{(pid=61905) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 961.243089] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.245388] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.366s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.247081] env[61905]: INFO nova.compute.claims [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.267672] env[61905]: INFO nova.compute.manager [-] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Took 1.28 seconds to deallocate network for instance. [ 961.270342] env[61905]: INFO nova.scheduler.client.report [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted allocations for instance 26375621-b272-4243-95bd-5cf5b946cec4 [ 961.383446] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362804, 'name': Rename_Task, 'duration_secs': 0.138829} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.383739] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.384011] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43162999-7e2f-4d65-b07c-20a917e64295 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.391183] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 961.391183] env[61905]: value = "task-1362805" [ 961.391183] env[61905]: _type = "Task" [ 961.391183] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.408817] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.519458] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 961.539990] env[61905]: DEBUG nova.network.neutron [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.547020] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 961.547279] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 961.547443] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.547631] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 961.547811] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.548028] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 961.548261] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 961.548425] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 961.548596] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 961.548760] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 961.548950] env[61905]: DEBUG nova.virt.hardware [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 961.549923] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c41fe2-f1cf-4c29-8d2c-250b960ab056 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.559228] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56e67ba-02f5-4a83-9a39-edf7ec14a8c6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.766606] env[61905]: DEBUG nova.compute.manager [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Received event network-vif-plugged-356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 961.766884] env[61905]: DEBUG oslo_concurrency.lockutils [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] Acquiring lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.767211] env[61905]: DEBUG oslo_concurrency.lockutils [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.767382] env[61905]: DEBUG oslo_concurrency.lockutils [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.769676] env[61905]: DEBUG nova.compute.manager [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] No waiting events found dispatching 
network-vif-plugged-356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 961.769862] env[61905]: WARNING nova.compute.manager [req-f85b1db1-aadb-4e65-ab2c-273960ac7de5 req-c7a6e93f-8d82-4ac0-ab51-10d549b4f03d service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Received unexpected event network-vif-plugged-356a18e0-71dc-40e6-be4e-a823167086ef for instance with vm_state building and task_state spawning. [ 961.781862] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.785461] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e3107b77-4757-4c85-bcd3-e7f82f0c02c3 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "26375621-b272-4243-95bd-5cf5b946cec4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.745s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.872344] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Successfully updated port: 356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.903904] env[61905]: DEBUG oslo_vmware.api [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362805, 'name': PowerOnVM_Task, 'duration_secs': 0.48212} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.904338] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.010207] env[61905]: DEBUG nova.compute.manager [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 962.011387] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd96402d-f695-4820-bc5a-2831d644d348 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.043498] env[61905]: INFO nova.compute.manager [-] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Took 1.26 seconds to deallocate network for instance. 
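The ReconfigVM_Task / PowerOnVM_Task / DeleteDatastoreFile_Task progress lines throughout this trace come from oslo.vmware's task polling (the _poll_task frames above point into oslo_vmware/api.py; the public entry point is oslo_vmware.api.VMwareAPISession.wait_for_task). A minimal Python sketch of that polling pattern follows, for orientation only: the get_task_info helper, the TaskInfo attribute names, and the 0.5-second interval are assumptions for this example, not the oslo.vmware internals.

    import time

    def wait_for_vmware_task(session, task_ref, poll_interval=0.5):
        # Poll a vCenter task until it reaches a terminal state, emitting
        # progress lines shaped like the "Task: {'id': ...} progress is N%."
        # entries in the log above.
        while True:
            info = session.get_task_info(task_ref)  # assumed helper returning a TaskInfo-like object
            if info.state == 'success':
                print("Task: {'id': %s, 'name': %s} completed successfully." % (task_ref, info.name))
                return info.result
            if info.state == 'error':
                raise RuntimeError('Task %s failed: %s' % (task_ref, info.error))
            print("Task: {'id': %s, 'name': %s} progress is %s%%." % (task_ref, info.name, info.progress or 0))
            time.sleep(poll_interval)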
[ 962.328866] env[61905]: DEBUG nova.compute.manager [req-bbac71aa-cf59-4d3e-b2bd-8bd2e96a5d46 req-3c45f0f8-ae83-4595-a9dc-069b3158025d service nova] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Received event network-vif-deleted-ab6551f0-7329-4cd9-8d65-f6b7e18984ed {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.358025] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.358281] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.358519] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.358716] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.358886] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.362079] env[61905]: INFO nova.compute.manager [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Terminating instance [ 962.364422] env[61905]: DEBUG nova.compute.manager [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.364663] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.366023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca2fcfe-ed67-4fa1-a6cc-10232252f9dc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.369316] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.369634] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquired lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.369935] env[61905]: DEBUG nova.network.neutron [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.375216] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.375554] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.375554] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.379050] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.380493] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5bfc972-3f1c-4967-98fe-64c36ab096c6 {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.391453] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 962.391453] env[61905]: value = "task-1362806" [ 962.391453] env[61905]: _type = "Task" [ 962.391453] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.401786] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362806, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.430691] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c349f4-35ee-4a17-80ad-8fce8f9ec501 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.439155] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cd2806-84ad-4fb8-b2a1-0d8259713e6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.472787] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9831e5a5-2fe2-415a-b85b-20dbe64654b6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.481343] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491a80d8-7b42-4336-a089-d6f1bb0c556b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.496133] env[61905]: DEBUG nova.compute.provider_tree [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.532896] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f41859f7-4a1b-4930-a97f-56a5d4099374 tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 21.002s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.552541] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.861459] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" 
{{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.861806] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.903725] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362806, 'name': PowerOffVM_Task, 'duration_secs': 0.427284} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.904651] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.904651] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.904651] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03114588-b893-4f1f-83d4-2ece12f49822 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.915135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.915433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.915690] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.915891] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.916114] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.918144] env[61905]: INFO nova.compute.manager [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Terminating instance [ 962.920068] env[61905]: DEBUG nova.compute.manager [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.920305] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.921136] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0f65d9-063b-4181-a1b7-59d7dd6b24b9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.926483] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.932840] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.933173] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ad68e5d-76a1-49d8-8c44-3a64f2542ad4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.939922] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 962.939922] env[61905]: value = "task-1362808" [ 962.939922] env[61905]: _type = "Task" [ 962.939922] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.949601] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362808, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.990478] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.990854] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.991103] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleting the datastore file [datastore1] 22b6d87c-08c5-492c-a963-f7ad6ef5db5b {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.991451] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81ae56c0-1c11-464e-a1e9-8830d7f0c854 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.000064] env[61905]: DEBUG nova.scheduler.client.report [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 963.006728] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 963.006728] env[61905]: value = "task-1362809" [ 963.006728] env[61905]: _type = "Task" [ 963.006728] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.017181] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362809, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.192973] env[61905]: DEBUG nova.network.neutron [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Updating instance_info_cache with network_info: [{"id": "356a18e0-71dc-40e6-be4e-a823167086ef", "address": "fa:16:3e:33:d6:6e", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap356a18e0-71", "ovs_interfaceid": "356a18e0-71dc-40e6-be4e-a823167086ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.253768] env[61905]: INFO nova.network.neutron [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Port 1a2f5768-0301-4ff2-a1ae-e02fe03be64b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 963.254240] env[61905]: DEBUG nova.network.neutron [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [{"id": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "address": "fa:16:3e:3d:fc:06", "network": {"id": "dd057eb4-847f-4d06-88bd-a25b4fd8db1f", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1810627924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8cd0317a9e0e4f1d86c49a82e8ffbaa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9c4edd5-d88e-4996-afea-00130ace0dad", "external-id": "nsx-vlan-transportzone-261", "segmentation_id": 261, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1260d42-8e", "ovs_interfaceid": "d1260d42-8ebd-4227-91b1-e34c80b3bdb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.364383] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 963.453136] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362808, 'name': PowerOffVM_Task, 'duration_secs': 0.223111} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.453418] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.453588] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.453844] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b781bf5-09fb-4a28-ba65-203964489518 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.508288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.508872] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 963.512030] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.546s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.512816] env[61905]: DEBUG nova.objects.instance [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lazy-loading 'resources' on Instance uuid 7bb17b60-268a-4670-beb8-df5232a698ae {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.523421] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.523421] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.523753] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleting the datastore file [datastore2] 63eb2219-fea2-4af0-90d2-e8d9ac53a138 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.523819] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5514be8-ea7e-4c45-8a51-65b615de2460 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.529179] env[61905]: DEBUG oslo_vmware.api [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15829} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.529797] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.530016] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.530213] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.530394] env[61905]: INFO nova.compute.manager [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 963.530639] env[61905]: DEBUG oslo.service.loopingcall [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.530836] env[61905]: DEBUG nova.compute.manager [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.531065] env[61905]: DEBUG nova.network.neutron [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.534941] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 963.534941] env[61905]: value = "task-1362811" [ 963.534941] env[61905]: _type = "Task" [ 963.534941] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.544162] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.695945] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.698036] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Instance network_info: |[{"id": "356a18e0-71dc-40e6-be4e-a823167086ef", "address": "fa:16:3e:33:d6:6e", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap356a18e0-71", "ovs_interfaceid": "356a18e0-71dc-40e6-be4e-a823167086ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 963.698036] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:d6:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '356a18e0-71dc-40e6-be4e-a823167086ef', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.708018] env[61905]: DEBUG oslo.service.loopingcall [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.708018] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.708018] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb80bce7-9b5a-484c-9169-ea5b3ebb9a7a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.731175] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.731175] env[61905]: value = "task-1362812" [ 963.731175] env[61905]: _type = "Task" [ 963.731175] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.745480] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362812, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.757331] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Releasing lock "refresh_cache-63eb2219-fea2-4af0-90d2-e8d9ac53a138" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.794558] env[61905]: DEBUG nova.compute.manager [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Received event network-changed-356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 963.794788] env[61905]: DEBUG nova.compute.manager [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Refreshing instance network info cache due to event network-changed-356a18e0-71dc-40e6-be4e-a823167086ef. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 963.795045] env[61905]: DEBUG oslo_concurrency.lockutils [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] Acquiring lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.795237] env[61905]: DEBUG oslo_concurrency.lockutils [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] Acquired lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.795428] env[61905]: DEBUG nova.network.neutron [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Refreshing network info cache for port 356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.892776] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.022060] env[61905]: DEBUG nova.compute.utils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 964.023761] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 964.023946] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.048548] env[61905]: DEBUG oslo_vmware.api [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213474} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.048548] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.048548] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.048548] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.048907] env[61905]: INFO nova.compute.manager [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Took 1.13 seconds to destroy the instance on the hypervisor. [ 964.048907] env[61905]: DEBUG oslo.service.loopingcall [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 964.049297] env[61905]: DEBUG nova.compute.manager [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 964.049405] env[61905]: DEBUG nova.network.neutron [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.092914] env[61905]: DEBUG nova.policy [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '302950aecbc54ee0843853aac306fab2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28c767f45ae54b8fbfe2c93fc9027447', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 964.206592] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c32df8a-3828-45c0-bf2e-8f716ff1581b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.215493] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e427759-91c9-4e23-ac0e-2924f9af262e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.252707] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a09259-0f1a-41b7-8dfa-7c1ee3b4f441 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.261436] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0c256a60-1342-41d3-97c5-e23b5f462cd7 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "interface-63eb2219-fea2-4af0-90d2-e8d9ac53a138-1a2f5768-0301-4ff2-a1ae-e02fe03be64b" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 9.905s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.266735] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97340f4b-cc5c-4ecd-a55d-6909bae40197 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.270978] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362812, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.285606] env[61905]: DEBUG nova.compute.provider_tree [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.332024] env[61905]: DEBUG nova.network.neutron [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.526737] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 964.574264] env[61905]: DEBUG nova.network.neutron [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Updated VIF entry in instance network info cache for port 356a18e0-71dc-40e6-be4e-a823167086ef. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.574264] env[61905]: DEBUG nova.network.neutron [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Updating instance_info_cache with network_info: [{"id": "356a18e0-71dc-40e6-be4e-a823167086ef", "address": "fa:16:3e:33:d6:6e", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap356a18e0-71", "ovs_interfaceid": "356a18e0-71dc-40e6-be4e-a823167086ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.622096] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Successfully created port: 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.760749] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362812, 'name': CreateVM_Task, 'duration_secs': 0.613643} completed 
successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.760953] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 964.761692] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.761823] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.762178] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 964.762449] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae56727a-d98c-41e4-a57a-4ea01fec9e83 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.768664] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 964.768664] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52592b6f-5454-083e-da70-f023a1572081" [ 964.768664] env[61905]: _type = "Task" [ 964.768664] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.778083] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52592b6f-5454-083e-da70-f023a1572081, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.788118] env[61905]: DEBUG nova.scheduler.client.report [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.836081] env[61905]: INFO nova.compute.manager [-] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Took 1.30 seconds to deallocate network for instance. [ 965.075987] env[61905]: DEBUG oslo_concurrency.lockutils [req-52025172-7922-483f-98f6-ab4d8ce75d8a req-3e5cdf22-8c65-4000-86c4-a2ac8daa01d1 service nova] Releasing lock "refresh_cache-e810c9bb-ffb1-47f2-bc23-375520a2f50d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.280068] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52592b6f-5454-083e-da70-f023a1572081, 'name': SearchDatastore_Task, 'duration_secs': 0.023177} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.280403] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.280643] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.280878] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.281086] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.281281] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.281760] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58ce659b-cef7-41c6-b850-6cd7b89ca15b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.293925] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.782s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.295822] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.296008] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.297347] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.440s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.298835] env[61905]: INFO nova.compute.claims [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.301640] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-295b58c0-d365-403b-b40a-ba95abae0f72 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.308877] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 965.308877] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a368f-0e00-bd77-ddc3-dd0be6598710" [ 965.308877] env[61905]: _type = "Task" [ 965.308877] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.317256] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a368f-0e00-bd77-ddc3-dd0be6598710, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.331796] env[61905]: INFO nova.scheduler.client.report [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Deleted allocations for instance 7bb17b60-268a-4670-beb8-df5232a698ae [ 965.346135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.548563] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 965.576412] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.576686] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.576846] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.577077] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.577255] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.577408] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 
tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.577610] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.577771] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.577938] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.578118] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.578307] env[61905]: DEBUG nova.virt.hardware [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.579710] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8757f9f1-c6c0-4be1-b12d-c6c060b2bdde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.589716] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b92db01-c199-4c4f-b230-3c8d79b41af7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.820757] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a368f-0e00-bd77-ddc3-dd0be6598710, 'name': SearchDatastore_Task, 'duration_secs': 0.019906} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.823010] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17830f2a-bfe7-40e4-a046-2b2fa1b4a18e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.827159] env[61905]: DEBUG nova.compute.manager [req-d83a498d-2451-491e-8eb7-8c7b07fd3984 req-aef39de5-4df7-4d3a-a58f-c8a4a63dbb07 service nova] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Received event network-vif-deleted-be53f19f-74d1-4618-a4d5-92af42f53641 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.827387] env[61905]: DEBUG nova.compute.manager [req-d83a498d-2451-491e-8eb7-8c7b07fd3984 req-aef39de5-4df7-4d3a-a58f-c8a4a63dbb07 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Received event network-vif-deleted-d1260d42-8ebd-4227-91b1-e34c80b3bdb0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.827557] env[61905]: INFO nova.compute.manager [req-d83a498d-2451-491e-8eb7-8c7b07fd3984 req-aef39de5-4df7-4d3a-a58f-c8a4a63dbb07 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Neutron deleted interface d1260d42-8ebd-4227-91b1-e34c80b3bdb0; detaching it from the instance and deleting it from the info cache [ 965.827729] env[61905]: DEBUG nova.network.neutron [req-d83a498d-2451-491e-8eb7-8c7b07fd3984 req-aef39de5-4df7-4d3a-a58f-c8a4a63dbb07 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.833927] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 965.833927] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5299ae93-bb5c-de87-328d-bbb2c8b8e8b2" [ 965.833927] env[61905]: _type = "Task" [ 965.833927] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.844407] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5299ae93-bb5c-de87-328d-bbb2c8b8e8b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.845392] env[61905]: DEBUG oslo_concurrency.lockutils [None req-18c55b61-eda0-4245-8fdf-ba5a13cecbaa tempest-ServerShowV257Test-1075131130 tempest-ServerShowV257Test-1075131130-project-member] Lock "7bb17b60-268a-4670-beb8-df5232a698ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.678s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.897852] env[61905]: DEBUG nova.network.neutron [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.331637] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b75aab07-54c7-4b0e-bd8d-18c6897b7fc4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.343611] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b19ac2-ba0c-4701-a8ef-37d67a21f8a0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.359790] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5299ae93-bb5c-de87-328d-bbb2c8b8e8b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010961} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.361070] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.361070] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e810c9bb-ffb1-47f2-bc23-375520a2f50d/e810c9bb-ffb1-47f2-bc23-375520a2f50d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.361311] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c7b977f4-baaf-4c95-9394-09b2f429b335 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.381411] env[61905]: DEBUG nova.compute.manager [req-d83a498d-2451-491e-8eb7-8c7b07fd3984 req-aef39de5-4df7-4d3a-a58f-c8a4a63dbb07 service nova] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Detach interface failed, port_id=d1260d42-8ebd-4227-91b1-e34c80b3bdb0, reason: Instance 63eb2219-fea2-4af0-90d2-e8d9ac53a138 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 966.382223] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 966.382223] env[61905]: value = "task-1362813" [ 966.382223] env[61905]: _type = "Task" [ 966.382223] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.391826] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362813, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.400898] env[61905]: INFO nova.compute.manager [-] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Took 2.35 seconds to deallocate network for instance. [ 966.503590] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1f9ac1-0f4f-4f2a-b14d-5251fd75974b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.512520] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87dfbcc9-6dc6-4339-8572-de4f27aae2f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.545656] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3ead4b-41b8-404a-9f94-69ed85d016a5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.555447] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005e55e6-010b-4636-ad94-9f8ded390e9d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.570911] env[61905]: DEBUG nova.compute.provider_tree [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.632981] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Successfully updated port: 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.892657] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362813, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.911528] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.074431] env[61905]: DEBUG nova.scheduler.client.report [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 967.136248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.136421] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.136546] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.394030] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362813, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525196} completed successfully. 
[ 967.394405] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e810c9bb-ffb1-47f2-bc23-375520a2f50d/e810c9bb-ffb1-47f2-bc23-375520a2f50d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}}
[ 967.394645] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}}
[ 967.394902] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d323a08b-6221-412c-a1ed-a40d505bc462 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 967.401750] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){
[ 967.401750] env[61905]: value = "task-1362814"
[ 967.401750] env[61905]: _type = "Task"
[ 967.401750] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 967.410199] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 967.579212] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 967.579991] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Start building networks asynchronously for instance.
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 967.582782] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.801s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.582992] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.585023] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.033s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.585317] env[61905]: DEBUG nova.objects.instance [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'resources' on Instance uuid 55a9190b-52f7-4bba-81b0-079e62537183 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.606230] env[61905]: INFO nova.scheduler.client.report [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocations for instance b9400202-eb37-4c75-bbf3-807edb7bc16f [ 967.679548] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 967.808847] env[61905]: DEBUG nova.network.neutron [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "address": "fa:16:3e:bb:0a:52", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d82d8e0-26", "ovs_interfaceid": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 967.863313] env[61905]: DEBUG nova.compute.manager [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Received event network-vif-plugged-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 967.863434] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 967.863753] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 967.863865] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 967.864053] env[61905]: DEBUG nova.compute.manager [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] No waiting events found dispatching network-vif-plugged-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
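The update_instance_cache_with_nw_info record above serializes the instance's Neutron VIFs into the info cache as JSON. A short sketch of pulling the commonly needed fields out of such a payload; the document trims the payload here to the keys the sketch reads, with values copied from the log:

    import json

    network_info = json.loads('''[{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628",
        "address": "fa:16:3e:bb:0a:52", "devname": "tap3d82d8e0-26",
        "network": {"bridge": "br-int",
                    "subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.11",
                                          "type": "fixed"}]}]}}]''')

    for vif in network_info:
        # Collect every fixed IP across all subnets attached to this VIF.
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        # -> 3d82d8e0-... fa:16:3e:bb:0a:52 tap3d82d8e0-26 ['192.168.128.11']
        print(vif['id'], vif['address'], vif['devname'], fixed_ips)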
[ 967.864230] env[61905]: WARNING nova.compute.manager [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Received unexpected event network-vif-plugged-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 for instance with vm_state building and task_state spawning.
[ 967.864399] env[61905]: DEBUG nova.compute.manager [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Received event network-changed-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 967.864643] env[61905]: DEBUG nova.compute.manager [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Refreshing instance network info cache due to event network-changed-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}
[ 967.864775] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Acquiring lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 967.912921] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071375} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.913210] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.913998] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0e76fe-943a-4ef6-b978-2f3495c6e374 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.937418] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] e810c9bb-ffb1-47f2-bc23-375520a2f50d/e810c9bb-ffb1-47f2-bc23-375520a2f50d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.938081] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42150ad6-a987-4c0b-a29f-21ac459fe2ca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.958655] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 967.958655] env[61905]: value = "task-1362815" [ 967.958655] env[61905]: _type = "Task" [ 967.958655] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.970278] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362815, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.088887] env[61905]: DEBUG nova.compute.utils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 968.094046] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 968.094046] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 968.113033] env[61905]: DEBUG oslo_concurrency.lockutils [None req-cd754793-7ac5-444c-8060-88d8b7a17620 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "b9400202-eb37-4c75-bbf3-807edb7bc16f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.818s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.156994] env[61905]: DEBUG nova.policy [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '947946764fc64847946057d867de54bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '980cc259c0254e84989e0cfc0e45837f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 968.231663] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307b6ad4-c6a4-40cc-9d39-3f553de84b33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.239585] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b78083-a6e2-43f6-9e81-59e676fa8c3d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.269730] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeabd8f7-04d3-4c89-a11c-404584c883ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.277247] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd350828-b2ae-47f9-936c-a50c6c01a8e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.290062] env[61905]: DEBUG nova.compute.provider_tree [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.311853] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.312631] env[61905]: DEBUG nova.compute.manager [None 
req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Instance network_info: |[{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "address": "fa:16:3e:bb:0a:52", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d82d8e0-26", "ovs_interfaceid": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 968.312631] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Acquired lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.312851] env[61905]: DEBUG nova.network.neutron [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Refreshing network info cache for port 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.313853] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:0a:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d82d8e0-2624-4d0b-a98b-1cfd93ccf628', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.321413] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating folder: Project (28c767f45ae54b8fbfe2c93fc9027447). Parent ref: group-v289968. 
{{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.322061] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7c1ae28-8090-410a-b094-9fa2a78d7342 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.333699] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created folder: Project (28c767f45ae54b8fbfe2c93fc9027447) in parent group-v289968. [ 968.333883] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating folder: Instances. Parent ref: group-v290089. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.334123] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d98c72ba-d94b-4157-928e-66463fc16c6a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.342833] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created folder: Instances in parent group-v290089. [ 968.343079] env[61905]: DEBUG oslo.service.loopingcall [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.343262] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.343464] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b763e290-d425-4d74-8027-e14f3ba69817 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.363401] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.363401] env[61905]: value = "task-1362818" [ 968.363401] env[61905]: _type = "Task" [ 968.363401] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.371218] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362818, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.413241] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Successfully created port: 799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 968.472074] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362815, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.594295] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 968.793422] env[61905]: DEBUG nova.scheduler.client.report [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 968.872769] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362818, 'name': CreateVM_Task, 'duration_secs': 0.312991} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.873111] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.873606] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.873775] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.874108] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.874645] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ea8a9f1-f0ce-48ae-92a3-764102b924fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.879215] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 968.879215] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5296c5fc-1b21-193b-c54c-48258377df71" [ 
968.879215] env[61905]: _type = "Task" [ 968.879215] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.888184] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5296c5fc-1b21-193b-c54c-48258377df71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.969688] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362815, 'name': ReconfigVM_Task, 'duration_secs': 0.719482} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.970030] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Reconfigured VM instance instance-0000005d to attach disk [datastore2] e810c9bb-ffb1-47f2-bc23-375520a2f50d/e810c9bb-ffb1-47f2-bc23-375520a2f50d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.970659] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cb6aed7-af1f-49db-9a2b-39e359dc180f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.978638] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 968.978638] env[61905]: value = "task-1362819" [ 968.978638] env[61905]: _type = "Task" [ 968.978638] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.988640] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362819, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.007354] env[61905]: DEBUG nova.network.neutron [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updated VIF entry in instance network info cache for port 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.007722] env[61905]: DEBUG nova.network.neutron [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "address": "fa:16:3e:bb:0a:52", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d82d8e0-26", "ovs_interfaceid": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.298298] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.302281] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.409s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.303202] env[61905]: INFO nova.compute.claims [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.323178] env[61905]: INFO nova.scheduler.client.report [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 55a9190b-52f7-4bba-81b0-079e62537183 [ 969.389673] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5296c5fc-1b21-193b-c54c-48258377df71, 'name': SearchDatastore_Task, 'duration_secs': 0.009351} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.390032] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.390261] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.390527] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.390688] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.390868] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.391165] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e717c806-ac4c-435a-895a-20639f2ebec6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.399228] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.399406] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.400330] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2bfc0c-995e-4420-aef1-1471ad9ac868 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.405579] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 969.405579] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4dfd5-2f68-9e5e-a2e6-85cd050527c2" [ 969.405579] env[61905]: _type = "Task" [ 969.405579] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.412673] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4dfd5-2f68-9e5e-a2e6-85cd050527c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.488625] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362819, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.510782] env[61905]: DEBUG oslo_concurrency.lockutils [req-ccaa1479-aba5-4507-b59c-a4de9daa667b req-851dbd37-26e5-41de-9410-56c7ead44726 service nova] Releasing lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.603552] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 969.628620] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.628948] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.629136] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.629328] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.629483] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.629631] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.629839] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.630025] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.630215] env[61905]: DEBUG 
nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 969.630386] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 969.630557] env[61905]: DEBUG nova.virt.hardware [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 969.631729] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dede2f-0ae8-4412-b6da-83f26062b31f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.639686] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f249fa-4ec0-4783-b2e5-af99db9c6eda {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.832166] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fd67a25b-aceb-4229-a49e-75fb397ad3fa tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "55a9190b-52f7-4bba-81b0-079e62537183" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.639s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 969.884827] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Successfully updated port: 799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 969.893834] env[61905]: DEBUG nova.compute.manager [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Received event network-vif-plugged-799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 969.894061] env[61905]: DEBUG oslo_concurrency.lockutils [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 969.894276] env[61905]: DEBUG oslo_concurrency.lockutils [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
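The nova.virt.hardware records above walk the CPU-topology search for the 1-vCPU m1.nano flavor: with no flavor or image limits set, every (sockets, cores, threads) factorization of the vCPU count is a candidate, and for one vCPU only 1:1:1 survives, hence "Got 1 possible topologies". A simplified stand-in for that enumeration (a sketch, not Nova's actual implementation):

    from typing import NamedTuple

    class VirtCPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate factorizations vcpus = sockets * cores * threads that
        # respect the per-dimension maxima (65536 here means unconstrained,
        # matching the "limits were sockets=65536, ..." record above).
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(max_cores, per_socket) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], i.e. exactly one
    # topology for one vCPU, as logged.
    print(possible_topologies(1))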
[ 969.894506] env[61905]: DEBUG oslo_concurrency.lockutils [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 969.894637] env[61905]: DEBUG nova.compute.manager [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] No waiting events found dispatching network-vif-plugged-799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 969.894799] env[61905]: WARNING nova.compute.manager [req-892bdc55-e29f-4ae9-aaf9-f1aeb5eeb3d6 req-59b9ed1c-01ba-4310-93fa-8218634d4659 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Received unexpected event network-vif-plugged-799ace69-512b-4a5b-bc68-bb41890393e7 for instance with vm_state building and task_state spawning.
[ 969.915955] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c4dfd5-2f68-9e5e-a2e6-85cd050527c2, 'name': SearchDatastore_Task, 'duration_secs': 0.007998} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 969.917027] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68969ce8-a829-42ce-a955-d09e2d95198c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 969.922628] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){
[ 969.922628] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd800-88f6-a483-5fe8-b4ad109e16cb"
[ 969.922628] env[61905]: _type = "Task"
[ 969.922628] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 969.930457] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd800-88f6-a483-5fe8-b4ad109e16cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 969.988434] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362819, 'name': Rename_Task} progress is 99%
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.389309] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.389502] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.389607] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.434721] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528bd800-88f6-a483-5fe8-b4ad109e16cb, 'name': SearchDatastore_Task, 'duration_secs': 0.009025} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.434998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.435278] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 02a40a20-1506-48f2-bbd2-db62e5dfa166/02a40a20-1506-48f2-bbd2-db62e5dfa166.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.435536] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c1ec682-288e-46f7-85a6-b3d0bed0f0e3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.438640] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcee1c9-be73-407f-a1a0-d75b37dc15e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.447592] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c957aab7-6668-4dd1-a5e4-eb7832fed429 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.450502] env[61905]: DEBUG oslo_vmware.api 
[None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 970.450502] env[61905]: value = "task-1362820" [ 970.450502] env[61905]: _type = "Task" [ 970.450502] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.479817] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4fdeab-e72a-461b-8542-bd0c69c39a3c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.485232] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362820, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.495469] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2967c1d-e671-4202-8588-1534ac1946a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.499086] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362819, 'name': Rename_Task, 'duration_secs': 1.316282} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.499352] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.499881] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73dbaebc-38e6-45ab-9512-27bc79d6fd12 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.516205] env[61905]: DEBUG nova.compute.provider_tree [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.518594] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 970.518594] env[61905]: value = "task-1362821" [ 970.518594] env[61905]: _type = "Task" [ 970.518594] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.527253] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362821, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 970.728115] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 970.728516] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 970.823454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 970.823741] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 970.928445] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}}
[ 970.960565] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362820, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 971.020185] env[61905]: DEBUG nova.scheduler.client.report [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 971.033370] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362821, 'name': PowerOnVM_Task} progress is 78% {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
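The oslo_service.periodic_task records above come from the compute manager's periodic-task loop: methods decorated as periodic tasks are registered at class-definition time and invoked by run_periodic_tasks() whenever their spacing has elapsed. A minimal sketch of that machinery; the manager class and spacing values are illustrative, not Nova's real settings:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def _check_instance_build_time(self, context):
            print('checking instance build times')

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            print('healing instance info cache')

    # The service's timer invokes this repeatedly; each decorated method
    # runs when its spacing is due, emitting "Running periodic task
    # Manager._..." DEBUG records like the ones in this log.
    Manager().run_periodic_tasks(context=None)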
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.079066] env[61905]: DEBUG nova.network.neutron [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.235836] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.235836] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 971.326385] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 971.463015] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362820, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.528335] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.528968] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 971.532097] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.186s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.532326] env[61905]: DEBUG nova.objects.instance [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lazy-loading 'resources' on Instance uuid 22b6d87c-08c5-492c-a963-f7ad6ef5db5b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.537048] env[61905]: DEBUG oslo_vmware.api [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362821, 'name': PowerOnVM_Task, 'duration_secs': 0.702136} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.537048] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.537249] env[61905]: INFO nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Took 10.02 seconds to spawn the instance on the hypervisor. 
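The CopyVirtualDisk/PowerOnVM entries above all follow the same oslo.vmware pattern: invoke_api() issues a *_Task SOAP call that returns a task reference immediately, and wait_for_task() then polls it, which is what produces the repeated "Task: {...} progress is N%" lines from _poll_task. A minimal sketch of that pattern, assuming a reachable vCenter; the hostname, credentials, and the 'vm-1234' moref below are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials (not taken from this log).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference for some VirtualMachine.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # A *_Task method returns at once with a task reference...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ...which wait_for_task() polls until it reaches a terminal state,
    # logging "Task: {...} progress is N%" on each poll, as seen above.
    task_info = session.wait_for_task(task)

On success wait_for_task() returns the task info (the source of the "'duration_secs': ...} completed successfully" entries); on failure it raises, so callers never inspect task state by hand.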
[ 971.537429] env[61905]: DEBUG nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 971.538247] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e2958d-1610-4fc3-b191-0559aeb4fc2f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.582029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.582223] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Instance network_info: |[{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 971.582649] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:fb:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '799ace69-512b-4a5b-bc68-bb41890393e7', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.590439] env[61905]: DEBUG oslo.service.loopingcall [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.591298] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.591538] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4cd73b7-0960-4e93-a9f1-041ecefedaa6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.611683] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.611683] env[61905]: value = "task-1362822" [ 971.611683] env[61905]: _type = "Task" [ 971.611683] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.623353] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362822, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.850222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.918894] env[61905]: DEBUG nova.compute.manager [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Received event network-changed-799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.919119] env[61905]: DEBUG nova.compute.manager [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Refreshing instance network info cache due to event network-changed-799ace69-512b-4a5b-bc68-bb41890393e7. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 971.919369] env[61905]: DEBUG oslo_concurrency.lockutils [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] Acquiring lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.919574] env[61905]: DEBUG oslo_concurrency.lockutils [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] Acquired lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.919810] env[61905]: DEBUG nova.network.neutron [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Refreshing network info cache for port 799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.961501] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362820, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.481661} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.962055] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 02a40a20-1506-48f2-bbd2-db62e5dfa166/02a40a20-1506-48f2-bbd2-db62e5dfa166.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.962341] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.962579] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-966ca651-1978-4c4e-9291-10fe50ded583 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.968359] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 971.968359] env[61905]: value = "task-1362823" [ 971.968359] env[61905]: _type = "Task" [ 971.968359] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.976437] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362823, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.035190] env[61905]: DEBUG nova.compute.utils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 972.040940] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 972.040940] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.056270] env[61905]: INFO nova.compute.manager [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Took 15.03 seconds to build instance. [ 972.085199] env[61905]: DEBUG nova.policy [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ca67104cdbd4ac9be9a57bb19846925', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7af9072624d04f669e8183581e6ca50a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 972.122687] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362822, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.177118] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a094fffc-4a9c-4869-9675-3612c06a1ced {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.185209] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c80555-4fb1-4f7d-a1c4-99bfb3a581ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.217509] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea0ad7f-bea6-4ad8-aeda-c845e645f1f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.224757] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e212c001-9692-4d83-a188-6833b11976aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.237736] env[61905]: DEBUG nova.compute.provider_tree [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.357339] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Successfully created port: 478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 972.477598] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087413} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.478400] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.479752] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31515d2-f723-430b-bae5-cb7acefb6765 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.501039] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 02a40a20-1506-48f2-bbd2-db62e5dfa166/02a40a20-1506-48f2-bbd2-db62e5dfa166.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.501348] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f7bdda9-f28f-4e79-8fd2-25fa0a79e7bf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.524734] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 972.524734] env[61905]: value = "task-1362824" [ 972.524734] env[61905]: _type = "Task" [ 972.524734] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.534602] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.538428] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 972.561603] env[61905]: DEBUG oslo_concurrency.lockutils [None req-24c969fa-2cf9-4660-a133-a1731eab7d56 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.544s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.622623] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362822, 'name': CreateVM_Task, 'duration_secs': 0.603141} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.622845] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.623476] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.623669] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.624039] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.624687] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e0587eb-5f3b-4c69-b0de-fb15dda7e387 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.630887] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 972.630887] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520321ee-6d7d-91ef-0554-76b795696019" [ 972.630887] env[61905]: _type = "Task" [ 972.630887] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.638862] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520321ee-6d7d-91ef-0554-76b795696019, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.741077] env[61905]: DEBUG nova.scheduler.client.report [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 972.831732] env[61905]: DEBUG nova.network.neutron [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updated VIF entry in instance network info cache for port 799ace69-512b-4a5b-bc68-bb41890393e7. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.832117] env[61905]: DEBUG nova.network.neutron [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.019975] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.020421] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.020641] env[61905]: DEBUG nova.compute.manager [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 973.021846] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71ae736-475d-4a17-9faa-7fe5b080215a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.034566] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362824, 'name': ReconfigVM_Task, 'duration_secs': 0.284597} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.036285] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 02a40a20-1506-48f2-bbd2-db62e5dfa166/02a40a20-1506-48f2-bbd2-db62e5dfa166.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.036981] env[61905]: DEBUG nova.compute.manager [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 973.037586] env[61905]: DEBUG nova.objects.instance [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'flavor' on Instance uuid e810c9bb-ffb1-47f2-bc23-375520a2f50d {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.038931] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-292b96cb-bbe9-4af6-9163-b6871751bda0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.047318] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 973.047318] env[61905]: value = "task-1362825" [ 973.047318] env[61905]: _type = "Task" [ 973.047318] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.056124] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362825, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.143774] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]520321ee-6d7d-91ef-0554-76b795696019, 'name': SearchDatastore_Task, 'duration_secs': 0.00872} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.144227] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.144696] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.144944] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.145137] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.145334] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.145950] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a4de56a-62a0-4431-a88d-5c6239947bbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.156878] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.157082] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.157808] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09cca831-a359-40be-864d-948789e8d7cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.163157] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 973.163157] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52166868-d409-88de-7e9f-e2cc69e67349" [ 973.163157] env[61905]: _type = "Task" [ 973.163157] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.171530] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52166868-d409-88de-7e9f-e2cc69e67349, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.248982] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.251749] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.340s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.252010] env[61905]: DEBUG nova.objects.instance [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'resources' on Instance uuid 63eb2219-fea2-4af0-90d2-e8d9ac53a138 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.273583] env[61905]: INFO nova.scheduler.client.report [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted allocations for instance 22b6d87c-08c5-492c-a963-f7ad6ef5db5b [ 973.335441] env[61905]: DEBUG oslo_concurrency.lockutils [req-b7312df8-4be9-47ec-a0a0-5ab9784a27ae req-4c29df8d-b872-4ef3-a844-1e1da28b19b5 service nova] Releasing lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.544771] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.545062] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fe32d9d-4327-4552-9b83-2d689cf754db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.547521] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 973.555364] env[61905]: DEBUG oslo_vmware.api [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 973.555364] env[61905]: value = "task-1362826" [ 973.555364] env[61905]: _type = "Task" [ 973.555364] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.558736] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362825, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.567261] env[61905]: DEBUG oslo_vmware.api [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362826, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.574847] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 973.575330] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 973.575645] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 
tempest-ImagesTestJSON-247741105-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 973.577027] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 973.577935] env[61905]: DEBUG nova.virt.hardware [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 973.578827] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122f69dd-502b-4e73-9329-a1c22d661985 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.587284] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218073fe-1281-4a3e-8860-c9139875de1a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.676609] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52166868-d409-88de-7e9f-e2cc69e67349, 'name': SearchDatastore_Task, 'duration_secs': 0.019081} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.677445] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1f8fa09-2fcd-46e8-b2d2-fe24a41fb8e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.684034] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 973.684034] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5231487c-d740-6a0b-6009-49a7f981428e" [ 973.684034] env[61905]: _type = "Task" [ 973.684034] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.691385] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5231487c-d740-6a0b-6009-49a7f981428e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.784295] env[61905]: DEBUG oslo_concurrency.lockutils [None req-939e1e79-16db-4c35-8a4c-1db6893bdf4a tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "22b6d87c-08c5-492c-a963-f7ad6ef5db5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.426s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.881401] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9182cd-454a-4719-84df-16dc391ce8fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.888906] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1348ee3-1610-4567-818f-de936c2aa05d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.917705] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50402490-acd6-4d17-ae22-d0c00394a2e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.924662] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32321e85-a9ec-448c-98f1-aaf0e33dd467 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.940131] env[61905]: DEBUG nova.compute.provider_tree [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.059202] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362825, 'name': Rename_Task, 'duration_secs': 0.658333} completed 
successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.063113] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.064129] env[61905]: DEBUG nova.compute.manager [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Received event network-vif-plugged-478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.064402] env[61905]: DEBUG oslo_concurrency.lockutils [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] Acquiring lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.064535] env[61905]: DEBUG oslo_concurrency.lockutils [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.064701] env[61905]: DEBUG oslo_concurrency.lockutils [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.064865] env[61905]: DEBUG nova.compute.manager [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] No waiting events found dispatching network-vif-plugged-478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 974.065042] env[61905]: WARNING nova.compute.manager [req-0438765a-3d04-4c5d-9fda-e4263a79e5bd req-4b69be0f-c6d0-4e6b-8cdf-ffd4db0fb1e3 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Received unexpected event network-vif-plugged-478465ef-4d52-4a2c-8e3c-befc6b84536c for instance with vm_state building and task_state spawning. [ 974.065568] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be931436-fb7a-4844-a24b-c404c238d19e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.072749] env[61905]: DEBUG oslo_vmware.api [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362826, 'name': PowerOffVM_Task, 'duration_secs': 0.299365} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.073806] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.073988] env[61905]: DEBUG nova.compute.manager [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 974.074301] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 974.074301] env[61905]: value = "task-1362827" [ 974.074301] env[61905]: _type = "Task" [ 974.074301] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.074968] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bcfae0-a983-44d4-96d7-01aed3dc2b1b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.085106] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.170743] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Successfully updated port: 478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.194772] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5231487c-d740-6a0b-6009-49a7f981428e, 'name': SearchDatastore_Task, 'duration_secs': 0.011761} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.195039] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.195303] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.195562] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3052acbc-2072-43f5-a1e1-4da317efe411 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.202502] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 974.202502] env[61905]: value = "task-1362828" [ 974.202502] env[61905]: _type = "Task" [ 974.202502] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.210386] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362828, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.443445] env[61905]: DEBUG nova.scheduler.client.report [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.586967] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362827, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.591957] env[61905]: DEBUG oslo_concurrency.lockutils [None req-fa03a932-f66c-42a6-8c3c-e0fe270844a5 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.572s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.675532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.675722] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.675826] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.713182] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45008} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.713467] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.713678] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.713936] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b8bb8d6-cb77-49fe-8700-50845631848e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.720295] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 974.720295] env[61905]: value = "task-1362829" [ 974.720295] env[61905]: _type = "Task" [ 974.720295] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.729942] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362829, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.759309] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 974.759527] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.759774] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.759962] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.760272] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.760557] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.760632] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.760764] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 974.760907] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 974.950087] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.954203] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.104s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.955645] env[61905]: INFO nova.compute.claims [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.977695] env[61905]: INFO nova.scheduler.client.report [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted allocations for instance 63eb2219-fea2-4af0-90d2-e8d9ac53a138 [ 975.087475] env[61905]: DEBUG oslo_vmware.api [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362827, 'name': PowerOnVM_Task, 'duration_secs': 0.670681} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.087786] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.087890] env[61905]: INFO nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Took 9.54 seconds to spawn the instance on the hypervisor. 
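The run of "Running periodic task ComputeManager._*" records above comes from oslo.service's periodic-task machinery: the compute manager subclasses periodic_task.PeriodicTasks, and run_periodic_tasks() invokes each decorated method in turn, logging one "Running periodic task ..." line per task. A minimal sketch of that pattern, using a placeholder Manager class rather than Nova's actual ComputeManager:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class Manager(periodic_task.PeriodicTasks):
    """Placeholder stand-in for nova.compute.manager.ComputeManager."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def update_available_resource(self, context):
        # In Nova this audits hypervisor resources under the
        # "compute_resources" lock, as the records above show.
        pass

mgr = Manager()
# Runs every task whose spacing has elapsed, logging
# "Running periodic task Manager.update_available_resource" for each.
mgr.run_periodic_tasks(context=None)

A task body may also decide there is nothing to do, which is why _reclaim_queued_deletes logs "CONF.reclaim_instance_interval <= 0, skipping..." instead of doing any work.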
[ 975.088089] env[61905]: DEBUG nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 975.088857] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c52455-9566-443a-81d8-a2eb55edc526 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.207137] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.230909] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069521} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.231284] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.232064] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1be5fb-658c-40fb-855c-f50b3441f35f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.281542] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.281542] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fcdf998-80d0-4ede-aaa4-ee413f7b7a48 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.281542] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.281542] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 975.281542] env[61905]: value = "task-1362830" [ 975.281542] env[61905]: _type = "Task" [ 975.281542] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.288872] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.421617] env[61905]: DEBUG nova.network.neutron [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Updating instance_info_cache with network_info: [{"id": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "address": "fa:16:3e:ce:c5:99", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap478465ef-4d", "ovs_interfaceid": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.486241] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4220b8b1-b36c-4666-969f-7e305e80ae4d tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "63eb2219-fea2-4af0-90d2-e8d9ac53a138" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.571s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.499687] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.500330] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.500330] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.500469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.500624] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.502710] env[61905]: INFO nova.compute.manager [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Terminating instance [ 975.504690] env[61905]: DEBUG nova.compute.manager [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 975.504851] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.505672] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e905a40d-a172-40ed-a54f-902d5c7d10d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.513735] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.513961] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e388ba20-de73-45af-8f76-90ce82ee7443 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.591201] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.591425] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.591608] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleting the datastore file [datastore2] e810c9bb-ffb1-47f2-bc23-375520a2f50d {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.591876] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c990dc1-5856-4c03-a1ae-4b3725595d8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.598128] env[61905]: DEBUG oslo_vmware.api [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 975.598128] env[61905]: value = "task-1362832" [ 975.598128] env[61905]: _type = "Task" [ 975.598128] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.607094] env[61905]: INFO nova.compute.manager [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Took 16.75 seconds to build instance. 
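Each "Waiting for the task: (returnval){ value = "task-..." }" block followed by "Task: {...} progress is N%" lines above is oslo.vmware's task-polling loop: the driver invokes an asynchronous vSphere *_Task method through the API session, then blocks in wait_for_task() while _poll_task logs progress until the task completes (or raises on failure). A minimal sketch of that pattern, with placeholder vCenter credentials and a hypothetical managed-object reference, not Nova's actual call site:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details; in Nova these come from nova.conf.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Hypothetical VM managed-object reference; Nova looks this up from
# the instance UUID via a property-collector search.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous task, then block on it. wait_for_task() is
# what emits the "Task: {'id': task-..., 'name': PowerOnVM_Task}
# progress is N%" records and returns the task info on success.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)

The same loop drives every task name seen in this section (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task, CreateVM_Task); only the invoked method and its arguments differ.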
[ 975.611375] env[61905]: DEBUG oslo_vmware.api [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.791433] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362830, 'name': ReconfigVM_Task, 'duration_secs': 0.492821} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.792789] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.795511] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6486b31a-8fb1-4f62-b6d8-cc100f3351df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.801997] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 975.801997] env[61905]: value = "task-1362833" [ 975.801997] env[61905]: _type = "Task" [ 975.801997] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.810359] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362833, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.924969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.925336] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Instance network_info: |[{"id": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "address": "fa:16:3e:ce:c5:99", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap478465ef-4d", "ovs_interfaceid": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 975.925831] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:c5:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b151a0c-aa46-4d21-9ef5-c09cf350b19c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '478465ef-4d52-4a2c-8e3c-befc6b84536c', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 975.933952] env[61905]: DEBUG oslo.service.loopingcall [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.934569] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.934871] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7512f5c4-4eac-4f4e-b5ff-0312475fe459 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.953847] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.953847] env[61905]: value = "task-1362834" [ 975.953847] env[61905]: _type = "Task" [ 975.953847] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.960914] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362834, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.071942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fc88ff-8f92-4fc0-bed3-65a3ffcb953d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.079588] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fd9e7f-e2b1-43e2-bd12-27e3d8f23d5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.109627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-59ae1534-a504-4100-bab2-9da998dde5a0 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.266s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.115432] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a761d453-ebf3-4132-ada9-e6a0d482953c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.118951] env[61905]: DEBUG nova.compute.manager [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Received event network-changed-478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.119158] env[61905]: DEBUG nova.compute.manager [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Refreshing instance network info cache due to event network-changed-478465ef-4d52-4a2c-8e3c-befc6b84536c. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 976.119385] env[61905]: DEBUG oslo_concurrency.lockutils [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] Acquiring lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.119630] env[61905]: DEBUG oslo_concurrency.lockutils [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] Acquired lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.119827] env[61905]: DEBUG nova.network.neutron [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Refreshing network info cache for port 478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.129506] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64563dc2-97ab-4062-a444-69b8c06e3ce5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.133483] env[61905]: DEBUG oslo_vmware.api [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161962} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.134442] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.134635] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.134811] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.134981] env[61905]: INFO nova.compute.manager [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Took 0.63 seconds to destroy the instance on the hypervisor. [ 976.135235] env[61905]: DEBUG oslo.service.loopingcall [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.135962] env[61905]: DEBUG nova.compute.manager [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 976.136076] env[61905]: DEBUG nova.network.neutron [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.147456] env[61905]: DEBUG nova.compute.provider_tree [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.298065] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.298382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.298596] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.298781] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.298956] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.301688] env[61905]: INFO nova.compute.manager [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Terminating instance [ 976.306735] env[61905]: DEBUG 
nova.compute.manager [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 976.306935] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 976.307668] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f986959e-44e4-4609-a36f-4395e5481ebc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.315012] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.317737] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b74d783-78b3-4b4f-9817-ef1a680bd87e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.319160] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362833, 'name': Rename_Task, 'duration_secs': 0.130015} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.319418] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.320108] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d109730-2513-49fb-97f1-db355fb54620 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.324629] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 976.324629] env[61905]: value = "task-1362835" [ 976.324629] env[61905]: _type = "Task" [ 976.324629] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.328475] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 976.328475] env[61905]: value = "task-1362836" [ 976.328475] env[61905]: _type = "Task" [ 976.328475] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.334781] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.339579] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.466440] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362834, 'name': CreateVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.652110] env[61905]: DEBUG nova.scheduler.client.report [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.749235] env[61905]: DEBUG nova.compute.manager [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Received event network-changed-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.749422] env[61905]: DEBUG nova.compute.manager [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Refreshing instance network info cache due to event network-changed-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 976.749600] env[61905]: DEBUG oslo_concurrency.lockutils [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] Acquiring lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.749745] env[61905]: DEBUG oslo_concurrency.lockutils [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] Acquired lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.749901] env[61905]: DEBUG nova.network.neutron [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Refreshing network info cache for port 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.824801] env[61905]: DEBUG nova.network.neutron [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Updated VIF entry in instance network info cache for port 478465ef-4d52-4a2c-8e3c-befc6b84536c. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.825173] env[61905]: DEBUG nova.network.neutron [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Updating instance_info_cache with network_info: [{"id": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "address": "fa:16:3e:ce:c5:99", "network": {"id": "8452fc02-ebd1-4382-8f43-f1de9872f65d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1224741998-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7af9072624d04f669e8183581e6ca50a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b151a0c-aa46-4d21-9ef5-c09cf350b19c", "external-id": "nsx-vlan-transportzone-343", "segmentation_id": 343, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap478465ef-4d", "ovs_interfaceid": "478465ef-4d52-4a2c-8e3c-befc6b84536c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.837785] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362835, 'name': PowerOffVM_Task, 'duration_secs': 0.237428} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.840786] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 976.840966] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.841470] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362836, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.841681] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc048855-a6d1-45fb-953c-3a46197e2099 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.889479] env[61905]: DEBUG nova.network.neutron [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.965576] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362834, 'name': CreateVM_Task, 'duration_secs': 0.612796} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.965746] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 976.966784] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.966969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.967359] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 976.967634] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ef690be-d2c4-47d0-a2e8-f50013390d37 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.972221] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 976.972221] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a1afdc-a241-aac0-7481-2cea13b853fd" [ 976.972221] env[61905]: _type = "Task" [ 976.972221] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.982856] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a1afdc-a241-aac0-7481-2cea13b853fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.157154] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.157717] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 977.160438] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.887s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.160627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.160777] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 977.162125] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46dedbc-639b-412c-a214-3079e12a5f48 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.170652] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816d5c31-75f8-4d8f-b8ad-28403ae7c186 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.184985] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e89365-2dd9-4f9c-b4f7-728d5e808a70 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.191999] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d20594-d872-4a17-9fe8-fe82efebd497 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.220827] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180205MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 977.220979] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.221256] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.331384] env[61905]: DEBUG oslo_concurrency.lockutils [req-e18d1f2d-2d9d-4eec-bb1d-5a24060291c7 req-441d6a8a-83be-40ae-9bda-12b76f9ed495 service nova] Releasing lock "refresh_cache-c79ae168-cf98-4b0a-a55d-a39d66f82462" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.340582] env[61905]: DEBUG oslo_vmware.api [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362836, 'name': PowerOnVM_Task, 'duration_secs': 0.981714} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.340857] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.341087] env[61905]: INFO nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Took 7.74 seconds to spawn the instance on the hypervisor. [ 977.341289] env[61905]: DEBUG nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.342203] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1e17c5-48ba-4afe-a580-893416bf4c30 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.392410] env[61905]: INFO nova.compute.manager [-] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Took 1.26 seconds to deallocate network for instance. 
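The "Acquiring lock ... by ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" records threaded through this section are emitted by oslo.concurrency's lock wrappers in lockutils.py. A minimal sketch of the decorator form with an illustrative lock name (not Nova's code):

import logging
from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section: resource-tracker style work that must not
    # interleave with other holders of "compute_resources".
    pass

# Calling the function logs the acquire (with time waited) and the
# release (with time held), exactly the shape seen above.
update_usage()

The decorator path logs from the inner() wrapper (lockutils.py:402/407/421 in these records), while the "refresh_cache-*" pairs, logged from lock() itself (lockutils.py:310/313/331), correspond to the context-manager form, with lockutils.lock(name): ..., wrapped around the network-info cache refresh.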
[ 977.470336] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.470617] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.470812] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleting the datastore file [datastore1] 0f7ccb34-cb14-4b21-ae61-b066427d400e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.471188] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9602553f-2d8b-4c8c-bbbb-e0a1d9f4738a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.483726] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a1afdc-a241-aac0-7481-2cea13b853fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013352} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.485502] env[61905]: DEBUG nova.network.neutron [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updated VIF entry in instance network info cache for port 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.485820] env[61905]: DEBUG nova.network.neutron [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "address": "fa:16:3e:bb:0a:52", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d82d8e0-26", "ovs_interfaceid": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.487045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.487280] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.487516] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.487651] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.487820] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.488156] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for the task: (returnval){ [ 977.488156] env[61905]: value = "task-1362838" [ 977.488156] env[61905]: _type = "Task" [ 977.488156] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.488544] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-592c18e9-f427-40e5-ae03-9d45c42e8f86 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.499561] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362838, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.508754] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.508929] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.509652] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b56dddb-e9d7-4a65-b0aa-b3a354408afc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.515027] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 977.515027] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cfecd-58c9-e6f6-186d-7113f49d1131" [ 977.515027] env[61905]: _type = "Task" [ 977.515027] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.521987] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cfecd-58c9-e6f6-186d-7113f49d1131, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.663406] env[61905]: DEBUG nova.compute.utils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 977.665027] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 977.665467] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 977.702573] env[61905]: DEBUG nova.policy [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 977.863482] env[61905]: INFO nova.compute.manager [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Took 18.03 seconds to build instance. [ 977.898607] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.989785] env[61905]: DEBUG oslo_concurrency.lockutils [req-277a6c03-0dca-4908-af2c-a91d5dd49678 req-0a2c21f0-4257-41c2-8d65-64e73cdae1d0 service nova] Releasing lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.001019] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.025524] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cfecd-58c9-e6f6-186d-7113f49d1131, 'name': SearchDatastore_Task, 'duration_secs': 0.00893} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.026398] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e10a8fad-5104-415d-b578-d4a7b0e131e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.032220] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 978.032220] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5259b99d-50f3-03dd-6978-2e1438974467" [ 978.032220] env[61905]: _type = "Task" [ 978.032220] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.040160] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5259b99d-50f3-03dd-6978-2e1438974467, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.114186] env[61905]: DEBUG nova.compute.manager [req-b24d7841-4460-4027-be74-555e94f32fb6 req-3ad929e2-0f2d-4052-89db-079bb84dd99d service nova] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Received event network-vif-deleted-356a18e0-71dc-40e6-be4e-a823167086ef {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 978.168048] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 978.218042] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Successfully created port: d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.254919] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 0f7ccb34-cb14-4b21-ae61-b066427d400e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 7b0db0a2-c990-4160-9be8-018239425114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 1502df44-9166-4ce8-9117-a57e7be2d299 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e810c9bb-ffb1-47f2-bc23-375520a2f50d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 02a40a20-1506-48f2-bbd2-db62e5dfa166 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c79ae168-cf98-4b0a-a55d-a39d66f82462 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 94df3cde-9330-41a1-bbec-1ce2a76551d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 978.257092] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 978.368711] env[61905]: DEBUG oslo_concurrency.lockutils [None req-8485e5ff-e460-4429-9c20-0e31db4e7f84 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.538s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.383342] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5c04b5-ce32-4ff0-b38e-ea72f773e6a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.392322] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef2eb37-4150-4e13-8622-df4fad6aaf82 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.426568] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcaf97c-0ffc-4020-8ca8-b110fcd087bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.439426] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d81f75-804f-47f8-a175-fcbe2d7226c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.453023] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.502172] env[61905]: DEBUG oslo_vmware.api [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Task: {'id': task-1362838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.563578} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.502511] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.502711] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.502886] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.503411] env[61905]: INFO nova.compute.manager [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Took 2.20 seconds to destroy the instance on the hypervisor. [ 978.503411] env[61905]: DEBUG oslo.service.loopingcall [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 978.503508] env[61905]: DEBUG nova.compute.manager [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 978.503583] env[61905]: DEBUG nova.network.neutron [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 978.544633] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5259b99d-50f3-03dd-6978-2e1438974467, 'name': SearchDatastore_Task, 'duration_secs': 0.01272} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.545015] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.545327] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] c79ae168-cf98-4b0a-a55d-a39d66f82462/c79ae168-cf98-4b0a-a55d-a39d66f82462.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.545625] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d6354f1-cc9f-4b8f-bfa5-939534a09dea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.555247] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 978.555247] env[61905]: value = "task-1362839" [ 978.555247] env[61905]: _type = "Task" [ 978.555247] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.562421] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.956571] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.064376] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362839, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.177193] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 979.203872] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.204164] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.204334] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.204516] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.204673] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.204892] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.205056] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.205225] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 979.205411] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.206051] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.206051] env[61905]: DEBUG nova.virt.hardware [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.206693] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b87c2e2-01af-4c75-bca6-0aea018249c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.215257] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05fb14b-91d3-464a-9fc4-8638ac7309db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.258465] env[61905]: DEBUG nova.network.neutron [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.462241] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 979.462469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.241s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.462747] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.564s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.462966] env[61905]: DEBUG nova.objects.instance [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'resources' on Instance uuid e810c9bb-ffb1-47f2-bc23-375520a2f50d {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.564954] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362839, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.664317} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.565243] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] c79ae168-cf98-4b0a-a55d-a39d66f82462/c79ae168-cf98-4b0a-a55d-a39d66f82462.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.565455] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.565706] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63f58ea0-09fe-45ec-8f62-b205d61c484a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.572692] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 979.572692] env[61905]: value = "task-1362840" [ 979.572692] env[61905]: _type = "Task" [ 979.572692] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.580630] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.760899] env[61905]: INFO nova.compute.manager [-] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Took 1.26 seconds to deallocate network for instance. [ 979.826432] env[61905]: DEBUG nova.compute.manager [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Stashing vm_state: active {{(pid=61905) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 979.848555] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Successfully updated port: d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.082349] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061804} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.084854] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.085892] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaed809-8aea-472b-ae55-95df6e572da3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.108198] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] c79ae168-cf98-4b0a-a55d-a39d66f82462/c79ae168-cf98-4b0a-a55d-a39d66f82462.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.109390] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d4106b0-54bc-4dda-becb-47c5f506fed8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.125089] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3a03b3-922e-47fe-b2e8-84c3668334ad {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.131870] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294b8116-2954-442c-ab40-1442e8d8b557 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.136547] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 980.136547] env[61905]: value = "task-1362841" [ 980.136547] env[61905]: _type = "Task" [ 980.136547] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.168230] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d85e891-dac9-487b-952c-d53d0563901b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.172086] env[61905]: DEBUG nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Received event network-vif-deleted-7c93f7f3-4702-4071-8e42-c0627b146af5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.172288] env[61905]: DEBUG nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Received event network-vif-plugged-d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.172511] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Acquiring lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.172736] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.172908] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.173089] env[61905]: DEBUG nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] No waiting events found dispatching network-vif-plugged-d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 980.173257] env[61905]: WARNING nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Received unexpected event network-vif-plugged-d1579491-6761-4bf5-83f4-4528285059a2 for instance with vm_state building and task_state spawning. 
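
The lock records just above show both oslo.concurrency idioms this service relies on: the decorator form emits the 'Acquiring/acquired/"released" ... by "<qualified name>" :: waited/held Ns' triplets (lockutils.py:402/407/421), while the plain context manager produces the 'Acquiring/Acquired/Releasing lock "refresh_cache-..."' records (lockutils.py:310/313/331). A minimal sketch of the two forms, with lock names borrowed from the log purely for illustration:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on one named lock and logs the
    # waited/held durations, as in the "<uuid>-events" records above.
    @lockutils.synchronized('94df3cde-9330-41a1-bbec-1ce2a76551d6-events')
    def pop_event():
        pass  # critical section: pop/dispatch instance events

    # Context-manager form, as used around the network info cache refreshes.
    with lockutils.lock('refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6'):
        pass  # read/update the cache while holding the lock
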
[ 980.173422] env[61905]: DEBUG nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Received event network-changed-d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 980.173682] env[61905]: DEBUG nova.compute.manager [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Refreshing instance network info cache due to event network-changed-d1579491-6761-4bf5-83f4-4528285059a2. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 980.173880] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Acquiring lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.174029] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Acquired lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.174188] env[61905]: DEBUG nova.network.neutron [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Refreshing network info cache for port d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.178789] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362841, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.184697] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9c37c8-28ef-4f8f-98df-da63c2d643a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.199536] env[61905]: DEBUG nova.compute.provider_tree [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.269068] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.346377] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.351215] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.647492] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.702013] env[61905]: DEBUG nova.scheduler.client.report [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 980.708934] env[61905]: DEBUG nova.network.neutron [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 980.781708] env[61905]: DEBUG nova.network.neutron [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.147259] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.207266] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.209460] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.941s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.209691] env[61905]: DEBUG nova.objects.instance [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lazy-loading 'resources' on Instance uuid 0f7ccb34-cb14-4b21-ae61-b066427d400e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.229141] env[61905]: INFO nova.scheduler.client.report [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance e810c9bb-ffb1-47f2-bc23-375520a2f50d [ 981.283943] env[61905]: DEBUG oslo_concurrency.lockutils [req-76f26e15-4e1d-4bf8-9396-e1e51c168f86 req-282caf39-7b49-412a-b0f2-7a328b905a3d service nova] Releasing lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.284354] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.284547] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.648557] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362841, 'name': ReconfigVM_Task, 
'duration_secs': 1.174914} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.648862] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Reconfigured VM instance instance-00000060 to attach disk [datastore2] c79ae168-cf98-4b0a-a55d-a39d66f82462/c79ae168-cf98-4b0a-a55d-a39d66f82462.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.649558] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a9fd4e4-7cdf-4e0a-bf6b-4326e9d23f77 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.655811] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 981.655811] env[61905]: value = "task-1362842" [ 981.655811] env[61905]: _type = "Task" [ 981.655811] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.663263] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362842, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.736745] env[61905]: DEBUG oslo_concurrency.lockutils [None req-93e9b9d9-bcb8-48cd-a30c-c9caf3b98548 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "e810c9bb-ffb1-47f2-bc23-375520a2f50d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.237s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.815981] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.832582] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5122c43a-0d6d-4b16-9de0-a74a171ea5a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.841276] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33d8130-09f2-4647-ad53-e465927fecbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.873291] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fb9955-dc5a-45d6-ae56-7d1afdc9ffbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.880671] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71057113-a60a-4069-bd5d-ae6193e2d925 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.893720] env[61905]: DEBUG nova.compute.provider_tree [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.010243] env[61905]: DEBUG nova.network.neutron [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Updating instance_info_cache with network_info: [{"id": "d1579491-6761-4bf5-83f4-4528285059a2", "address": "fa:16:3e:44:a6:9c", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1579491-67", "ovs_interfaceid": "d1579491-6761-4bf5-83f4-4528285059a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.171721] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362842, 'name': Rename_Task, 'duration_secs': 0.169876} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.171981] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 982.172240] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67785aa1-b598-4a74-abe4-a492ecb5ca82 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.178576] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 982.178576] env[61905]: value = "task-1362843" [ 982.178576] env[61905]: _type = "Task" [ 982.178576] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.185586] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.397127] env[61905]: DEBUG nova.scheduler.client.report [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.506451] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "fb417a53-b6df-4566-87f2-bd56dafd789c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.506689] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.512852] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-94df3cde-9330-41a1-bbec-1ce2a76551d6" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.513260] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance network_info: |[{"id": "d1579491-6761-4bf5-83f4-4528285059a2", "address": "fa:16:3e:44:a6:9c", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1579491-67", "ovs_interfaceid": "d1579491-6761-4bf5-83f4-4528285059a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 982.513559] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:a6:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1579491-6761-4bf5-83f4-4528285059a2', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.521160] env[61905]: DEBUG oslo.service.loopingcall [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.522312] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.522577] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a5228fe-f8d2-4dfd-8530-82e3cd4b14bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.543259] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.543259] env[61905]: value = "task-1362844" [ 982.543259] env[61905]: _type = "Task" [ 982.543259] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.551150] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362844, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.690048] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362843, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.902618] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.904879] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.559s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.924390] env[61905]: INFO nova.scheduler.client.report [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Deleted allocations for instance 0f7ccb34-cb14-4b21-ae61-b066427d400e [ 983.009015] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 983.055565] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362844, 'name': CreateVM_Task, 'duration_secs': 0.334994} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.055831] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.056618] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.056916] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.058467] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.058833] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-540df032-338c-4abc-819f-0bb29235bd34 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.064343] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 983.064343] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c91c8-33d3-6763-4eb9-86334314ee6a" [ 983.064343] env[61905]: _type = "Task" [ 983.064343] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.072838] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c91c8-33d3-6763-4eb9-86334314ee6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.190179] env[61905]: DEBUG oslo_vmware.api [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362843, 'name': PowerOnVM_Task, 'duration_secs': 0.654875} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.190513] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.190726] env[61905]: INFO nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Took 9.64 seconds to spawn the instance on the hypervisor. [ 983.190912] env[61905]: DEBUG nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 983.191697] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7aa41e1-75ff-4dbc-8d0a-43e9195bedf7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.412992] env[61905]: INFO nova.compute.claims [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.431969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49aaeb53-a09b-4f41-9b2d-9a57b2a219c3 tempest-AttachInterfacesTestJSON-2099600748 tempest-AttachInterfacesTestJSON-2099600748-project-member] Lock "0f7ccb34-cb14-4b21-ae61-b066427d400e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.133s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.531227] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.574255] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522c91c8-33d3-6763-4eb9-86334314ee6a, 'name': SearchDatastore_Task, 'duration_secs': 0.018761} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.574556] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.574783] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.575024] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.575179] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.575387] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.575629] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6caf614-d0dc-4a81-be1d-fe8004f4c2b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.585229] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.585401] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.586080] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3f1ac28-e5b0-4aec-984b-ad80fdfcd76c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.590854] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 983.590854] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213b867-bd03-4a96-5ce5-2c22fc5748a1" [ 983.590854] env[61905]: _type = "Task" [ 983.590854] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.597731] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213b867-bd03-4a96-5ce5-2c22fc5748a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.708173] env[61905]: INFO nova.compute.manager [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Took 19.84 seconds to build instance. [ 983.918594] env[61905]: INFO nova.compute.resource_tracker [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating resource usage from migration 1a72af17-8baa-4c03-a33e-60d9cd0218a9 [ 984.044929] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189fb801-f312-4c60-8d93-2c2085f9d784 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.055868] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0611ec33-b8f6-4374-80a0-a10ae1f6e8f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.086070] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c2ecb7-5187-4ad0-bdcf-b24a4be829a6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.096541] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26477889-ef86-480a-8b7c-5655582bc26c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.105399] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5213b867-bd03-4a96-5ce5-2c22fc5748a1, 'name': SearchDatastore_Task, 'duration_secs': 0.027067} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.115173] env[61905]: DEBUG nova.compute.provider_tree [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.116587] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2258759a-fe9b-41ad-8a7c-c904e7341316 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.121909] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 984.121909] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5233176e-88de-9026-92d5-281fb0c55652" [ 984.121909] env[61905]: _type = "Task" [ 984.121909] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.129725] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5233176e-88de-9026-92d5-281fb0c55652, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.210539] env[61905]: DEBUG oslo_concurrency.lockutils [None req-be068355-0510-4426-9cbc-a163886787a8 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 21.349s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.620097] env[61905]: DEBUG nova.scheduler.client.report [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.633960] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5233176e-88de-9026-92d5-281fb0c55652, 'name': SearchDatastore_Task, 'duration_secs': 0.009477} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.634453] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.634706] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 94df3cde-9330-41a1-bbec-1ce2a76551d6/94df3cde-9330-41a1-bbec-1ce2a76551d6.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.634961] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c12d25d9-43b7-4a42-b1b0-70e777930bca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.642756] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 984.642756] env[61905]: value = "task-1362845" [ 984.642756] env[61905]: _type = "Task" [ 984.642756] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.652242] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362845, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.128239] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.223s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.128580] env[61905]: INFO nova.compute.manager [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Migrating [ 985.135572] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.604s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.137092] env[61905]: INFO nova.compute.claims [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.157579] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362845, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480182} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.158580] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 94df3cde-9330-41a1-bbec-1ce2a76551d6/94df3cde-9330-41a1-bbec-1ce2a76551d6.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.158920] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.159261] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7cc28a9-2622-4072-91b7-7f4a3cafea4d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.166589] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 985.166589] env[61905]: value = "task-1362846" [ 985.166589] env[61905]: _type = "Task" [ 985.166589] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.176550] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362846, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.649272] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.650040] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.650040] env[61905]: DEBUG nova.network.neutron [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.677938] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362846, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068113} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.678234] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.679021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8996776e-ae76-476c-acba-e1371a1f3bd3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.701648] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 94df3cde-9330-41a1-bbec-1ce2a76551d6/94df3cde-9330-41a1-bbec-1ce2a76551d6.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.702347] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-495626fd-b0f2-480a-9ac0-6714a31bea09 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.723281] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 985.723281] env[61905]: value = "task-1362847" [ 985.723281] env[61905]: _type = "Task" [ 985.723281] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.731070] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362847, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.905559] env[61905]: DEBUG nova.compute.manager [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 985.906499] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28693a2-93bb-46ae-b722-0b440c9ec846 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.233295] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362847, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.280995] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371bceae-362c-4d8c-a5af-6f8bda80bb9c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.288356] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d490540f-7c3d-47b4-b9af-3d68dcc29f5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.320042] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0568a82-7967-4342-98fb-5bd41ad7007d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.327836] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d63b4d-3c1d-4bf2-9a29-7ec4c1b9971a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.342539] env[61905]: DEBUG nova.compute.provider_tree [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.416974] env[61905]: INFO nova.compute.manager [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] instance snapshotting [ 986.419942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df78eb8c-b30e-4f70-9b74-67758aab52b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.438269] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ce8562-5e5e-40c4-bfc8-62933c28f804 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.507352] env[61905]: DEBUG nova.network.neutron [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", 
"segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.733337] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362847, 'name': ReconfigVM_Task, 'duration_secs': 0.913301} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.733658] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 94df3cde-9330-41a1-bbec-1ce2a76551d6/94df3cde-9330-41a1-bbec-1ce2a76551d6.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.734287] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f256149-a870-45c6-9291-938eb3e2a679 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.740728] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 986.740728] env[61905]: value = "task-1362848" [ 986.740728] env[61905]: _type = "Task" [ 986.740728] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.748676] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362848, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.845839] env[61905]: DEBUG nova.scheduler.client.report [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 986.952883] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Creating Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 986.953206] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a7c7bb20-0dce-4cac-9fa6-68f97451fd8c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.961848] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 986.961848] env[61905]: value = "task-1362849" [ 986.961848] env[61905]: _type = "Task" [ 986.961848] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.970723] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362849, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.006595] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.252378] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362848, 'name': Rename_Task, 'duration_secs': 0.317899} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.252856] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 987.253281] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6dc17be1-5b96-4bbe-89c2-c6500fab2a1c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.260041] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 987.260041] env[61905]: value = "task-1362850" [ 987.260041] env[61905]: _type = "Task" [ 987.260041] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.270032] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.354319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.354319] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 987.474845] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362849, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.771542] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362850, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.858673] env[61905]: DEBUG nova.compute.utils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.860110] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.860307] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 987.910287] env[61905]: DEBUG nova.policy [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 987.973451] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362849, 'name': CreateSnapshot_Task, 'duration_secs': 0.707373} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.973793] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Created Snapshot of the VM instance {{(pid=61905) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 987.974586] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b3c43e-76cb-4d8d-a5b4-020380782d96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.179041] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Successfully created port: 8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.274310] env[61905]: DEBUG oslo_vmware.api [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362850, 'name': PowerOnVM_Task, 'duration_secs': 0.516035} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.274823] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 988.275052] env[61905]: INFO nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Took 9.10 seconds to spawn the instance on the hypervisor. [ 988.275279] env[61905]: DEBUG nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 988.276603] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f04917-9640-472d-a672-5185e37fc206 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.365815] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 988.494605] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Creating linked-clone VM from snapshot {{(pid=61905) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 988.494963] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-78968b70-5613-4c00-a214-abd548671c76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.504150] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 988.504150] env[61905]: value = "task-1362851" [ 988.504150] env[61905]: _type = "Task" [ 988.504150] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.513741] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362851, 'name': CloneVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.524356] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ce3666-12b5-441e-b7d1-9c30d1895629 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.542283] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 0 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 988.798825] env[61905]: INFO nova.compute.manager [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Took 16.97 seconds to build instance. [ 989.021252] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362851, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.051095] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.051522] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34247d1a-e6e7-451c-a6c7-530db0a2679a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.065139] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 989.065139] env[61905]: value = "task-1362852" [ 989.065139] env[61905]: _type = "Task" [ 989.065139] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.073643] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.302052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-224e84b8-0555-4bb3-bad4-66e5e92cc67e tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.477s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.387111] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 989.411375] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.411375] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.411375] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.411375] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.411375] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.412410] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.413262] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.413661] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.414031] env[61905]: DEBUG nova.virt.hardware [None 
req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.417027] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.417027] env[61905]: DEBUG nova.virt.hardware [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.417027] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91876a8b-e1a3-44da-a83c-f8fbab503aba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.425196] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1080261-9ec7-463b-bf81-5995ee83ebc0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.522167] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362851, 'name': CloneVM_Task} progress is 95%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.574692] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362852, 'name': PowerOffVM_Task, 'duration_secs': 0.22188} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.580067] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.580067] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 17 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 989.687223] env[61905]: DEBUG nova.compute.manager [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Received event network-vif-plugged-8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.687446] env[61905]: DEBUG oslo_concurrency.lockutils [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] Acquiring lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.687656] env[61905]: DEBUG oslo_concurrency.lockutils [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.687831] env[61905]: DEBUG oslo_concurrency.lockutils [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.688008] env[61905]: DEBUG nova.compute.manager [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] No waiting events found dispatching network-vif-plugged-8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 989.691565] env[61905]: WARNING nova.compute.manager [req-cd53e0e2-fe39-454e-be9d-ef6891f871b7 req-456deb3d-c176-4e52-ba27-9671c76af699 service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Received unexpected event network-vif-plugged-8b60d95f-e546-4b36-9a3a-8b44e660aa57 for instance with vm_state building and task_state spawning. 
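(The power-off sequence recorded above, entries 989.051 through 989.575, is the generic oslo.vmware invoke-and-poll pattern: the driver invokes PowerOffVM_Task through the API session, and the session's task poller emits the "progress is N%" entries until vCenter reports completion. Below is a minimal sketch of that pattern assuming only oslo.vmware's public API; the hostname, credentials, and vm_ref are placeholders for illustration, not values taken from this log.

    from oslo_vmware import api

    def power_off(session, vm_ref):
        """Invoke PowerOffVM_Task and poll it to completion.

        Mirrors the sequence logged above: one "Invoking
        VirtualMachine.PowerOffVM_Task" entry, then repeated
        "Task: {...} progress is N%" polls until the task
        completes successfully (or raises on task error).
        """
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

    if __name__ == '__main__':
        # Placeholder connection details; a real run needs a reachable
        # vCenter plus a VirtualMachine managed-object reference, which
        # Nova resolves via the PropertyCollector calls logged above.
        session = api.VMwareAPISession(
            'vc.example.test', 'admin', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

The surrounding Acquiring/acquired/"released" lock entries come from oslo_concurrency.lockutils, which Nova wraps around operations like _locked_do_build_and_run_instance and do_terminate_instance so that builds, resizes, and deletes of the same instance serialize on its UUID.)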
[ 989.815319] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Successfully updated port: 8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 989.876648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.876648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.876648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.876648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.876648] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.877404] env[61905]: INFO nova.compute.manager [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Terminating instance [ 989.879235] env[61905]: DEBUG nova.compute.manager [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 989.879454] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.880304] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba97151-1793-4b38-b7f3-cded7004aae0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.887945] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.888201] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3da8f71-b7a4-4406-9bc2-39501a6718cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.894326] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 989.894326] env[61905]: value = "task-1362853" [ 989.894326] env[61905]: _type = "Task" [ 989.894326] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.904847] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362853, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.019780] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362851, 'name': CloneVM_Task, 'duration_secs': 1.353214} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.020124] env[61905]: INFO nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Created linked-clone VM from snapshot [ 990.021050] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cad4a79-b26d-49a3-afef-ea4ac93c5073 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.029362] env[61905]: DEBUG nova.virt.vmwareapi.images [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Uploading image 4f520639-97e7-4035-a1f7-b5b18babcac1 {{(pid=61905) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 990.044561] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Destroying the VM {{(pid=61905) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 990.044988] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3238809c-e345-4d46-92b3-40cbea0f97aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.052936] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 990.052936] env[61905]: value = "task-1362854" [ 990.052936] env[61905]: _type = "Task" [ 990.052936] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.062383] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362854, 'name': Destroy_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.085638] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:12:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.085894] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.086062] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.086316] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.086520] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.086684] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.086892] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.087060] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.087228] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 
tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.087391] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.087628] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.094355] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7210d2b2-8533-4eae-b985-7934aceadc9e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.112091] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 990.112091] env[61905]: value = "task-1362855" [ 990.112091] env[61905]: _type = "Task" [ 990.112091] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.123654] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362855, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.219320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.219570] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.318197] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.318356] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.318518] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.411020] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362853, 'name': PowerOffVM_Task, 'duration_secs': 0.253468} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.411020] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.411020] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.411020] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3335c4e0-968d-4dc6-ae42-71c3bd97397b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.475038] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.475038] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.475186] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleting the datastore file [datastore1] 94df3cde-9330-41a1-bbec-1ce2a76551d6 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.475443] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34a03b83-b009-4e6c-9fad-358c5f7ecb65 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.481920] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 990.481920] env[61905]: value = "task-1362857" [ 990.481920] env[61905]: _type = "Task" [ 990.481920] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.490596] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.562704] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362854, 'name': Destroy_Task, 'duration_secs': 0.489534} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.562955] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Destroyed the VM [ 990.563223] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Deleting Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 990.563475] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b286625b-5fc5-44b3-af0f-ca6f3041b298 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.569736] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 990.569736] env[61905]: value = "task-1362858" [ 990.569736] env[61905]: _type = "Task" [ 990.569736] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.577951] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362858, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.622537] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362855, 'name': ReconfigVM_Task, 'duration_secs': 0.188428} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.622784] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 33 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.721645] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 990.850018] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 990.995598] env[61905]: DEBUG oslo_vmware.api [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220453} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.995860] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.996054] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.996351] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.996598] env[61905]: INFO nova.compute.manager [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 990.996872] env[61905]: DEBUG oslo.service.loopingcall [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 990.997080] env[61905]: DEBUG nova.compute.manager [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 990.997176] env[61905]: DEBUG nova.network.neutron [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.804326] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 991.804722] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 991.804722] env[61905]: DEBUG nova.virt.hardware [None 
req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 991.804805] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 991.804912] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 991.805092] env[61905]: DEBUG nova.virt.hardware [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 991.811020] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfiguring VM instance instance-0000005f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 991.814228] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362858, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.814453] env[61905]: WARNING oslo_vmware.common.loopingcall [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] task run outlasted interval by 0.243699 sec [ 991.820343] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1888e03f-f560-4477-b9c0-e57a6d9ce439 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.834634] env[61905]: DEBUG nova.compute.manager [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Received event network-changed-8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.834634] env[61905]: DEBUG nova.compute.manager [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Refreshing instance network info cache due to event network-changed-8b60d95f-e546-4b36-9a3a-8b44e660aa57. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.834788] env[61905]: DEBUG oslo_concurrency.lockutils [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] Acquiring lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.845265] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362858, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.846596] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 991.846596] env[61905]: value = "task-1362859" [ 991.846596] env[61905]: _type = "Task" [ 991.846596] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.854422] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362859, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.855432] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.855681] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.857211] env[61905]: INFO nova.compute.claims [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.038628] env[61905]: DEBUG nova.network.neutron [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Updating instance_info_cache with network_info: [{"id": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "address": "fa:16:3e:03:63:b4", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b60d95f-e5", "ovs_interfaceid": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.345093] env[61905]: DEBUG oslo_vmware.api [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362858, 'name': RemoveSnapshot_Task, 'duration_secs': 1.29019} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.345402] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Deleted Snapshot of the VM instance {{(pid=61905) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 992.356984] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362859, 'name': ReconfigVM_Task, 'duration_secs': 0.258397} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.358095] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfigured VM instance instance-0000005f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 992.359048] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f10ab9f-0434-4243-8ad6-69398ec3c6bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.383055] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.384453] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85b5d562-c261-45c5-8f9d-a818feee0f17 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.407036] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 992.407036] env[61905]: value = "task-1362860" [ 992.407036] env[61905]: _type = "Task" [ 992.407036] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.414377] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362860, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.541583] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.541988] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Instance network_info: |[{"id": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "address": "fa:16:3e:03:63:b4", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b60d95f-e5", "ovs_interfaceid": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 992.542422] env[61905]: DEBUG oslo_concurrency.lockutils [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] Acquired lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.542685] env[61905]: DEBUG nova.network.neutron [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Refreshing network info cache for port 8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.544009] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:63:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b60d95f-e546-4b36-9a3a-8b44e660aa57', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.552051] env[61905]: DEBUG oslo.service.loopingcall [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.553076] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.553335] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a6e127b-04c9-44cb-861e-e9af0f44cfe1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.573237] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.573237] env[61905]: value = "task-1362861" [ 992.573237] env[61905]: _type = "Task" [ 992.573237] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.586100] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362861, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.631016] env[61905]: DEBUG nova.network.neutron [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.853428] env[61905]: WARNING nova.compute.manager [None req-82decea3-7c67-4fde-be3b-ccd6c7960664 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Image not found during snapshot: nova.exception.ImageNotFound: Image 4f520639-97e7-4035-a1f7-b5b18babcac1 could not be found. [ 992.915363] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362860, 'name': ReconfigVM_Task, 'duration_secs': 0.272325} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.917778] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9/020dc38a-d4ea-41fa-a3aa-3eb63b3516d9.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.918079] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 50 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 992.985034] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752c4753-e6b1-4155-84db-a19c61097b44 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.993019] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be52a9ba-a840-4dc1-b01c-a367655b02b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.024767] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23fb711-292d-4811-b23a-d52757c47e1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.034415] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf517041-7bc4-4e56-b3ff-620b3b55183a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.049487] env[61905]: DEBUG nova.compute.provider_tree [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.084357] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362861, 'name': CreateVM_Task, 'duration_secs': 0.334668} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.086524] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.087237] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.087409] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.087723] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.088257] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99810cd5-ea18-4c88-a26c-3f4fb6acbc08 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.092809] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 993.092809] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e7e524-4bb0-a9ac-e304-c95a07358e4b" [ 993.092809] env[61905]: _type = "Task" [ 993.092809] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.102820] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e7e524-4bb0-a9ac-e304-c95a07358e4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.133643] env[61905]: INFO nova.compute.manager [-] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Took 2.14 seconds to deallocate network for instance. [ 993.270981] env[61905]: DEBUG nova.network.neutron [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Updated VIF entry in instance network info cache for port 8b60d95f-e546-4b36-9a3a-8b44e660aa57. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.271398] env[61905]: DEBUG nova.network.neutron [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Updating instance_info_cache with network_info: [{"id": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "address": "fa:16:3e:03:63:b4", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b60d95f-e5", "ovs_interfaceid": "8b60d95f-e546-4b36-9a3a-8b44e660aa57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.428891] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67086d62-e997-46a9-bbcf-2a5ca5f01e08 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.447934] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21161e5c-497f-4c42-9006-b5af03b1556c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.465267] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 67 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 993.553214] env[61905]: DEBUG nova.scheduler.client.report [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.603756] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': 
session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e7e524-4bb0-a9ac-e304-c95a07358e4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01078} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.604089] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.604337] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.604562] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.604709] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.604885] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.605459] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3ad7c07-8c64-4392-bda4-4f61f5932fd6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.613270] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.613444] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.614153] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e437776e-c0ba-4c45-9fbc-5834d3040365 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.619386] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 993.619386] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b9f96c-e9e3-8404-cec5-35792125f6b1" [ 993.619386] env[61905]: _type = "Task" [ 993.619386] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.626616] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b9f96c-e9e3-8404-cec5-35792125f6b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.639272] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.774039] env[61905]: DEBUG oslo_concurrency.lockutils [req-7c01768d-b477-4620-869b-2a71b076eda4 req-304b9c48-3249-499b-b835-a401b5515c5b service nova] Releasing lock "refresh_cache-fb417a53-b6df-4566-87f2-bd56dafd789c" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.817213] env[61905]: DEBUG nova.compute.manager [req-b1330716-991c-4035-90d4-d502caeb8467 req-cab9e36a-8439-41f2-b8a4-694c7563a0fa service nova] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Received event network-vif-deleted-d1579491-6761-4bf5-83f4-4528285059a2 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.975966] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.976339] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.976500] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.976688] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.976856] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.978929] env[61905]: INFO nova.compute.manager [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Terminating instance [ 993.980718] env[61905]: DEBUG nova.compute.manager [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 993.980917] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 993.981753] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57cc9e1-57f6-4fea-8cf1-fe680ef497b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.989345] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.989577] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fce83003-db8a-4fea-ba3c-c6b1f0e3d364 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.996065] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 993.996065] env[61905]: value = "task-1362862" [ 993.996065] env[61905]: _type = "Task" [ 993.996065] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.004661] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362862, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.009813] env[61905]: DEBUG nova.network.neutron [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Port 799ace69-512b-4a5b-bc68-bb41890393e7 binding to destination host cpu-1 is already ACTIVE {{(pid=61905) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 994.058029] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.058195] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 994.061123] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.422s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.061351] env[61905]: DEBUG nova.objects.instance [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'resources' on Instance uuid 94df3cde-9330-41a1-bbec-1ce2a76551d6 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.130676] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b9f96c-e9e3-8404-cec5-35792125f6b1, 'name': SearchDatastore_Task, 'duration_secs': 0.008212} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.131504] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e50a4d8-27d9-45ab-91f3-ef1f9ab50038 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.138107] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 994.138107] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527f380a-f054-cc47-f16d-e39386aacc59" [ 994.138107] env[61905]: _type = "Task" [ 994.138107] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.146088] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527f380a-f054-cc47-f16d-e39386aacc59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.506276] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362862, 'name': PowerOffVM_Task, 'duration_secs': 0.182993} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.506548] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.506723] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 994.506972] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56a6da01-56d7-4abc-9498-34a41aa9978d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.565139] env[61905]: DEBUG nova.compute.utils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.570106] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 994.570165] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.572243] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 994.572410] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 994.572585] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleting the datastore file [datastore2] c79ae168-cf98-4b0a-a55d-a39d66f82462 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 994.572837] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d83d7b4-4776-4232-8d7a-fdc798ed61ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.578554] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for the task: (returnval){ [ 994.578554] env[61905]: value = "task-1362864" [ 994.578554] env[61905]: _type = "Task" [ 994.578554] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.586238] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.610587] env[61905]: DEBUG nova.policy [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feefcc2d831d457181dc441f0310a58b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f90126694e884e2dbc7f57b7adb96afa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 994.648976] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]527f380a-f054-cc47-f16d-e39386aacc59, 'name': SearchDatastore_Task, 'duration_secs': 0.014326} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.651202] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.651494] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] fb417a53-b6df-4566-87f2-bd56dafd789c/fb417a53-b6df-4566-87f2-bd56dafd789c.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.651910] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4500241f-ba19-4c57-8aaa-f658adc6bac0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.658226] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 994.658226] env[61905]: value = "task-1362865" [ 994.658226] env[61905]: _type = "Task" [ 994.658226] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.668331] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362865, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.697321] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68115f86-41c4-4ae1-88c6-7e16672ecdd5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.704655] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3c0cf2-5d35-45cf-bb75-50955a361772 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.738241] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b20af66-68bd-4c70-b597-7bfb4c769336 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.743011] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d9b147-6fcb-4211-a177-9ff26688ba28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.756266] env[61905]: DEBUG nova.compute.provider_tree [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.880770] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Successfully created port: 560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.037022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.037022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.037022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.071031] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 
tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 995.092201] env[61905]: DEBUG oslo_vmware.api [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Task: {'id': task-1362864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184778} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.092679] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.092934] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.093205] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.093672] env[61905]: INFO nova.compute.manager [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Took 1.11 seconds to destroy the instance on the hypervisor. [ 995.093852] env[61905]: DEBUG oslo.service.loopingcall [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.094177] env[61905]: DEBUG nova.compute.manager [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 995.094309] env[61905]: DEBUG nova.network.neutron [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.169114] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362865, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.260188] env[61905]: DEBUG nova.scheduler.client.report [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.668755] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579252} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.669182] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] fb417a53-b6df-4566-87f2-bd56dafd789c/fb417a53-b6df-4566-87f2-bd56dafd789c.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.669441] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.669699] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aeff1607-e5a0-4f21-874b-6333c81f9d81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.676132] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 995.676132] env[61905]: value = "task-1362866" [ 995.676132] env[61905]: _type = "Task" [ 995.676132] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.684122] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.767663] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.789986] env[61905]: INFO nova.scheduler.client.report [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 94df3cde-9330-41a1-bbec-1ce2a76551d6 [ 995.840488] env[61905]: DEBUG nova.compute.manager [req-eb156a6a-9d17-4172-abfb-b2eebca4070f req-ac62109f-5554-41e8-b318-1755ed5745c8 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Received event network-vif-deleted-478465ef-4d52-4a2c-8e3c-befc6b84536c {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 995.840696] env[61905]: INFO nova.compute.manager [req-eb156a6a-9d17-4172-abfb-b2eebca4070f req-ac62109f-5554-41e8-b318-1755ed5745c8 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Neutron deleted interface 478465ef-4d52-4a2c-8e3c-befc6b84536c; detaching it from the instance and deleting it from the info cache [ 995.840869] env[61905]: DEBUG nova.network.neutron [req-eb156a6a-9d17-4172-abfb-b2eebca4070f req-ac62109f-5554-41e8-b318-1755ed5745c8 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.870088] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.870410] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.895016] env[61905]: DEBUG nova.network.neutron [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.077819] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.077819] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock 
"refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.077819] env[61905]: DEBUG nova.network.neutron [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 996.081338] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 996.107896] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 996.108239] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 996.108558] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.108775] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 996.108933] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.109098] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 996.109316] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 996.109481] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 996.109650] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 996.109813] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 996.109984] env[61905]: DEBUG nova.virt.hardware [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 996.111158] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb725fe-a594-428c-9b40-7e70ced814fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.119279] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6745d8-ee4b-4326-921e-0d1ddb0bad94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.185437] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062142} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.186307] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.186858] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b857923-b99b-4beb-ab09-e4686dce271d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.208708] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] fb417a53-b6df-4566-87f2-bd56dafd789c/fb417a53-b6df-4566-87f2-bd56dafd789c.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.208973] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b0272a3-fab7-4c9d-b279-eee43eb96f61 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.228416] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 996.228416] env[61905]: value = "task-1362867" [ 996.228416] env[61905]: _type = "Task" [ 996.228416] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.238139] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362867, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.298341] env[61905]: DEBUG oslo_concurrency.lockutils [None req-a76d9bd4-c47c-4c15-a7fb-faf9b28e6f82 tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "94df3cde-9330-41a1-bbec-1ce2a76551d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.424s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.343410] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-882ddaea-0c2c-4506-8890-0bfe12bb695e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.356038] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41546467-aad1-4762-bb80-5e2286dcc3c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.375252] env[61905]: INFO nova.compute.manager [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Detaching volume 7c5adcd9-fa15-4e5e-b5bf-e23627647ad2 [ 996.383791] env[61905]: DEBUG nova.compute.manager [req-eb156a6a-9d17-4172-abfb-b2eebca4070f req-ac62109f-5554-41e8-b318-1755ed5745c8 service nova] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Detach interface failed, port_id=478465ef-4d52-4a2c-8e3c-befc6b84536c, reason: Instance c79ae168-cf98-4b0a-a55d-a39d66f82462 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 996.396940] env[61905]: INFO nova.compute.manager [-] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Took 1.30 seconds to deallocate network for instance. [ 996.433281] env[61905]: INFO nova.virt.block_device [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Attempting to driver detach volume 7c5adcd9-fa15-4e5e-b5bf-e23627647ad2 from mountpoint /dev/sdb [ 996.434029] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 996.434029] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290084', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'name': 'volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7b0db0a2-c990-4160-9be8-018239425114', 'attached_at': '', 'detached_at': '', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'serial': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 996.434715] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3787f3-355f-4f67-9136-6af389f4ad3a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.457319] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a3d2b7-e8de-4a2c-ab93-f6622253e282 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.465320] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96800012-a8d3-4b89-aa0c-1c2c46ba012b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.487682] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d38f12-be99-4351-b12f-f4f536838d6c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.502854] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] The volume has not been displaced from its original location: [datastore2] volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2/volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2.vmdk. No consolidation needed. {{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 996.507989] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.508713] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9820243e-6039-4729-b1d6-68cc5fb864a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.526573] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 996.526573] env[61905]: value = "task-1362868" [ 996.526573] env[61905]: _type = "Task" [ 996.526573] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.534494] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.577155] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Successfully updated port: 560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.738280] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362867, 'name': ReconfigVM_Task, 'duration_secs': 0.26397} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.738587] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Reconfigured VM instance instance-00000062 to attach disk [datastore1] fb417a53-b6df-4566-87f2-bd56dafd789c/fb417a53-b6df-4566-87f2-bd56dafd789c.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.739265] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2674865-e9f7-4128-9cdd-85591808e3eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.748633] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 996.748633] env[61905]: value = "task-1362869" [ 996.748633] env[61905]: _type = "Task" [ 996.748633] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.756415] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362869, 'name': Rename_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.816222] env[61905]: DEBUG nova.network.neutron [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.904663] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.904988] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.905262] env[61905]: DEBUG nova.objects.instance [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lazy-loading 'resources' on Instance uuid c79ae168-cf98-4b0a-a55d-a39d66f82462 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.004397] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.004909] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.037788] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362868, 'name': ReconfigVM_Task, 'duration_secs': 0.367153} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.038093] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 997.043023] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-316ee4cb-1012-4827-8f52-f3445dbfaf72 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.058902] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 997.058902] env[61905]: value = "task-1362870" [ 997.058902] env[61905]: _type = "Task" [ 997.058902] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.066869] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362870, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.080117] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.080504] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquired lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.080504] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.261668] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362869, 'name': Rename_Task, 'duration_secs': 0.133336} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.261973] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.262262] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-483571df-e300-4569-bb85-9e119390d8f3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.270010] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 997.270010] env[61905]: value = "task-1362871" [ 997.270010] env[61905]: _type = "Task" [ 997.270010] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.278368] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.320677] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.507328] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 997.549496] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627b3b77-ed22-4021-b447-46b0fff72d00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.558713] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a875f7-33d5-47fb-9d0c-445fac3f6fdd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.571912] env[61905]: DEBUG oslo_vmware.api [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362870, 'name': ReconfigVM_Task, 'duration_secs': 0.201121} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.594924] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290084', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'name': 'volume-7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7b0db0a2-c990-4160-9be8-018239425114', 'attached_at': '', 'detached_at': '', 'volume_id': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2', 'serial': '7c5adcd9-fa15-4e5e-b5bf-e23627647ad2'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 997.601674] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86814e23-3c93-4802-b59b-73a69e27ae82 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.610503] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e57ed4c-c8dd-4deb-8bd3-3fc778c132d3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.632761] env[61905]: DEBUG nova.compute.provider_tree [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.649311] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.780179] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362871, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.799027] env[61905]: DEBUG nova.network.neutron [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updating instance_info_cache with network_info: [{"id": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "address": "fa:16:3e:d7:9a:ba", "network": {"id": "3d117fab-65b4-4a5b-9b3c-c4091cf6f24c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-335702686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90126694e884e2dbc7f57b7adb96afa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap560cc744-85", "ovs_interfaceid": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.846201] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e6c260-f05e-4afd-8376-7c6572021203 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.864384] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffce7c27-b006-4b70-a53a-ac7e073b6ec9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.869199] env[61905]: DEBUG nova.compute.manager [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Received event network-vif-plugged-560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.869413] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Acquiring lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.869618] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.869782] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Lock 
"15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.869946] env[61905]: DEBUG nova.compute.manager [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] No waiting events found dispatching network-vif-plugged-560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 997.870137] env[61905]: WARNING nova.compute.manager [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Received unexpected event network-vif-plugged-560cc744-85c9-41e0-9cca-2eccf7ca8535 for instance with vm_state building and task_state spawning. [ 997.870278] env[61905]: DEBUG nova.compute.manager [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Received event network-changed-560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.870437] env[61905]: DEBUG nova.compute.manager [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Refreshing instance network info cache due to event network-changed-560cc744-85c9-41e0-9cca-2eccf7ca8535. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 997.870599] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Acquiring lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.877289] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 83 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 998.028921] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.137371] env[61905]: DEBUG nova.scheduler.client.report [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.146657] env[61905]: DEBUG nova.objects.instance [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.280530] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362871, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.301926] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Releasing lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.302495] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Instance network_info: |[{"id": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "address": "fa:16:3e:d7:9a:ba", "network": {"id": "3d117fab-65b4-4a5b-9b3c-c4091cf6f24c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-335702686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90126694e884e2dbc7f57b7adb96afa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap560cc744-85", "ovs_interfaceid": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 998.302936] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Acquired lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.303259] env[61905]: DEBUG nova.network.neutron [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Refreshing network info cache for port 560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 998.304828] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 
tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:9a:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bab6a6c3-1c5c-4776-b21b-dec21196d702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '560cc744-85c9-41e0-9cca-2eccf7ca8535', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.312346] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Creating folder: Project (f90126694e884e2dbc7f57b7adb96afa). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.315249] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f355881-be3c-43f8-ae81-0fdddec55f54 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.325783] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Created folder: Project (f90126694e884e2dbc7f57b7adb96afa) in parent group-v289968. [ 998.325971] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Creating folder: Instances. Parent ref: group-v290098. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.326226] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08b086e7-96c9-42d6-8094-72f484b30a7b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.334832] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Created folder: Instances in parent group-v290098. [ 998.335080] env[61905]: DEBUG oslo.service.loopingcall [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.335279] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.335486] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1cf56ed-fe97-41b0-8810-d47e26c9fd68 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.356950] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.356950] env[61905]: value = "task-1362874" [ 998.356950] env[61905]: _type = "Task" [ 998.356950] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.365084] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362874, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.383413] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 998.383746] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69bb85ef-fb49-41ed-b804-f2c723756425 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.391208] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 998.391208] env[61905]: value = "task-1362875" [ 998.391208] env[61905]: _type = "Task" [ 998.391208] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.399344] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.561406] env[61905]: DEBUG nova.network.neutron [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updated VIF entry in instance network info cache for port 560cc744-85c9-41e0-9cca-2eccf7ca8535. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 998.561974] env[61905]: DEBUG nova.network.neutron [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updating instance_info_cache with network_info: [{"id": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "address": "fa:16:3e:d7:9a:ba", "network": {"id": "3d117fab-65b4-4a5b-9b3c-c4091cf6f24c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-335702686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90126694e884e2dbc7f57b7adb96afa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap560cc744-85", "ovs_interfaceid": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.643059] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.645987] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.616s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.646739] env[61905]: INFO nova.compute.claims [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 998.664912] env[61905]: INFO nova.scheduler.client.report [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Deleted allocations for instance c79ae168-cf98-4b0a-a55d-a39d66f82462 [ 998.781542] env[61905]: DEBUG oslo_vmware.api [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362871, 'name': PowerOnVM_Task, 'duration_secs': 1.027603} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.785029] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.785029] env[61905]: INFO nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Took 9.40 seconds to spawn the instance on the hypervisor. [ 998.785029] env[61905]: DEBUG nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 998.785029] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918aa03c-5d55-4820-bc54-597553b680f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.869735] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362874, 'name': CreateVM_Task, 'duration_secs': 0.314492} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.869735] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 998.869735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.869735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.869735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 998.869735] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb232223-516a-4362-9438-64a7b7ea4774 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.875022] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] 
Waiting for the task: (returnval){ [ 998.875022] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fb403b-c723-873a-2f3b-cf3473ab19a0" [ 998.875022] env[61905]: _type = "Task" [ 998.875022] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.880401] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fb403b-c723-873a-2f3b-cf3473ab19a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.899903] env[61905]: DEBUG oslo_vmware.api [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362875, 'name': PowerOnVM_Task, 'duration_secs': 0.373768} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.900184] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.900390] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3a672b8b-ce93-4b8c-9615-4b2494e5a355 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance '020dc38a-d4ea-41fa-a3aa-3eb63b3516d9' progress to 100 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 999.065505] env[61905]: DEBUG oslo_concurrency.lockutils [req-49f0f982-98f8-44d8-bc54-dcb0dac3f04e req-8f020c70-c60b-41f0-881e-cdd5d6892797 service nova] Releasing lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.088983] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.156086] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6688c8c7-43ab-463a-8c8f-43df97f49e0b tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.286s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.157028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.068s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.157222] env[61905]: DEBUG nova.compute.manager [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 999.158210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840346c4-bd87-4b7a-a25f-869df3b45506 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.165852] env[61905]: DEBUG nova.compute.manager [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 999.166416] env[61905]: DEBUG nova.objects.instance [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.171760] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52234241-8e96-4899-b372-d2139781a070 tempest-ImagesTestJSON-247741105 tempest-ImagesTestJSON-247741105-project-member] Lock "c79ae168-cf98-4b0a-a55d-a39d66f82462" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.195s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.301937] env[61905]: INFO nova.compute.manager [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Took 15.79 seconds to build instance. [ 999.386507] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fb403b-c723-873a-2f3b-cf3473ab19a0, 'name': SearchDatastore_Task, 'duration_secs': 0.011039} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.387405] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.387649] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.387881] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.388042] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.388230] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.388770] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cd40b1c-5fa9-44bf-a88a-f71bb4618e89 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.396723] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.396903] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.397670] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15100a68-08b1-4ede-8b14-f73c55c41048 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.403149] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 999.403149] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d6628b-aa9a-cef5-93ac-00572bc0ba86" [ 999.403149] env[61905]: _type = "Task" [ 999.403149] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.414100] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d6628b-aa9a-cef5-93ac-00572bc0ba86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.671496] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.671783] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f6f2746-25c4-4bd4-8987-a0696307a815 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.683320] env[61905]: DEBUG oslo_vmware.api [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 999.683320] env[61905]: value = "task-1362876" [ 999.683320] env[61905]: _type = "Task" [ 999.683320] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.696831] env[61905]: DEBUG oslo_vmware.api [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.770573] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a863fd-952e-4ec9-8752-f9064e4610ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.779756] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6955071-8995-40a7-a4ee-7467f33bb707 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.812162] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3aaa4c6b-0af0-4f94-a013-a53cc9b71d95 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.305s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.816024] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09da191e-d208-4309-9301-55d0037e9a7b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.826259] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8d3b95-db5a-4415-a73d-67dc231bb276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.842456] env[61905]: DEBUG nova.compute.provider_tree [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.877028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.878017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.878017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.878017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f 
tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.878017] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.880077] env[61905]: INFO nova.compute.manager [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Terminating instance [ 999.885730] env[61905]: DEBUG nova.compute.manager [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 999.885730] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.885730] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da754fb-7cb8-4279-b17d-9fb6c4af32c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.892568] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.892817] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-511cdd76-8b47-4281-a89a-c66fb4819f4c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.899181] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 999.899181] env[61905]: value = "task-1362877" [ 999.899181] env[61905]: _type = "Task" [ 999.899181] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.906576] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362877, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.919380] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d6628b-aa9a-cef5-93ac-00572bc0ba86, 'name': SearchDatastore_Task, 'duration_secs': 0.009859} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.920214] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e2f8742-b3fb-4a34-bd9b-67269aa3ea36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.925344] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 999.925344] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5270033b-afaf-aac7-f985-c0219e01aaeb" [ 999.925344] env[61905]: _type = "Task" [ 999.925344] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.933329] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5270033b-afaf-aac7-f985-c0219e01aaeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.193660] env[61905]: DEBUG oslo_vmware.api [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362876, 'name': PowerOffVM_Task, 'duration_secs': 0.207464} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.197017] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.197017] env[61905]: DEBUG nova.compute.manager [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1000.197017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67619cc-4059-4686-b574-f1c5f395aa76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.345987] env[61905]: DEBUG nova.scheduler.client.report [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1000.414065] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362877, 'name': PowerOffVM_Task, 'duration_secs': 0.20517} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.420494] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.420930] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.424096] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6c60a0a-5382-4e06-8e47-9c0908beba78 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.443503] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5270033b-afaf-aac7-f985-c0219e01aaeb, 'name': SearchDatastore_Task, 'duration_secs': 0.009376} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.443787] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.444061] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa/15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.444448] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c8e41db-86ea-41fe-852b-f237c1e01237 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.453887] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1000.453887] env[61905]: value = "task-1362879" [ 1000.453887] env[61905]: _type = "Task" [ 1000.453887] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.462856] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.492349] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.492425] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.492587] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleting the datastore file [datastore2] 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.492879] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b97b5397-60ee-4f08-acf2-cbc428e99f53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.499521] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for the task: (returnval){ [ 1000.499521] env[61905]: value = "task-1362880" [ 1000.499521] env[61905]: _type = "Task" [ 1000.499521] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.509519] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362880, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.510639] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6bcf9d-357b-41a7-83f0-2763d74818e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.521521] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Suspending the VM {{(pid=61905) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1000.521802] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d8687609-cd30-42f9-8569-04447ef952e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.534023] env[61905]: DEBUG oslo_vmware.api [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1000.534023] env[61905]: value = "task-1362881" [ 1000.534023] env[61905]: _type = "Task" [ 1000.534023] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.544077] env[61905]: DEBUG oslo_vmware.api [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362881, 'name': SuspendVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.676052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.676424] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.676692] env[61905]: DEBUG nova.compute.manager [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Going to confirm migration 2 {{(pid=61905) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1000.708907] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0092b980-adfd-4209-8038-7c9e42af5ff4 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.552s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.850517] env[61905]:
DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.851337] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1000.964330] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498853} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.964622] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa/15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.964845] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.965128] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96881ae0-6104-4305-9496-746f24b4d663 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.971388] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1000.971388] env[61905]: value = "task-1362882" [ 1000.971388] env[61905]: _type = "Task" [ 1000.971388] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.979212] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.012302] env[61905]: DEBUG oslo_vmware.api [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Task: {'id': task-1362880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397681} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.012583] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.012762] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.012935] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.013128] env[61905]: INFO nova.compute.manager [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1001.013716] env[61905]: DEBUG oslo.service.loopingcall [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.013844] env[61905]: DEBUG nova.compute.manager [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1001.013936] env[61905]: DEBUG nova.network.neutron [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1001.042119] env[61905]: DEBUG oslo_vmware.api [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362881, 'name': SuspendVM_Task} progress is 62%.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.253842] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.253842] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.253842] env[61905]: DEBUG nova.network.neutron [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1001.255347] env[61905]: DEBUG nova.objects.instance [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lazy-loading 'info_cache' on Instance uuid 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.357132] env[61905]: DEBUG nova.compute.utils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.358786] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1001.359121] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1001.439152] env[61905]: DEBUG nova.policy [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c91bb12e5a60408caa04ae70ecb1dd14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f1c8d91a7b4119bb32c82ef7bd940f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1001.488703] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110508} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.488968] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.489790] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45eec72-d7ba-4cde-bc0b-86c2c46d03de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.518662] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa/15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.519536] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8740c04-5542-4bab-ace3-65963a856919 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.546496] env[61905]: DEBUG oslo_vmware.api [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362881, 'name': SuspendVM_Task, 'duration_secs': 0.697257} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.547768] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Suspended the VM {{(pid=61905) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1001.548052] env[61905]: DEBUG nova.compute.manager [None req-f0b8e05c-4e9a-421e-bc3b-a63f54ebd0d1 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.550580] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1001.550580] env[61905]: value = "task-1362883" [ 1001.550580] env[61905]: _type = "Task" [ 1001.550580] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.550580] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe8881c-ecf1-46c8-9299-1e2463b04494 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.564970] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362883, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.713441] env[61905]: DEBUG nova.compute.manager [req-6dd56fed-d8ca-4aab-bce6-f2f02a7708e7 req-6996db18-1630-47b5-a563-87141d46ce3a service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Received event network-vif-deleted-767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.714804] env[61905]: INFO nova.compute.manager [req-6dd56fed-d8ca-4aab-bce6-f2f02a7708e7 req-6996db18-1630-47b5-a563-87141d46ce3a service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Neutron deleted interface 767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0; detaching it from the instance and deleting it from the info cache [ 1001.714804] env[61905]: DEBUG nova.network.neutron [req-6dd56fed-d8ca-4aab-bce6-f2f02a7708e7 req-6996db18-1630-47b5-a563-87141d46ce3a service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.753436] env[61905]: DEBUG nova.objects.instance [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.828212] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Successfully created port: 2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.862072] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1002.062929] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362883, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.183735] env[61905]: DEBUG nova.network.neutron [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.218523] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5455656c-9e66-44fe-b874-83ad93caef8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.227855] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa17652-dbaf-4108-8253-84ba55d0619d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.253992] env[61905]: DEBUG nova.compute.manager [req-6dd56fed-d8ca-4aab-bce6-f2f02a7708e7 req-6996db18-1630-47b5-a563-87141d46ce3a service nova] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Detach interface failed, port_id=767ea9d8-deb7-4b1f-b012-5a8a1a5cd6a0, reason: Instance 1502df44-9166-4ce8-9117-a57e7be2d299 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1002.257705] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.257870] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.258049] env[61905]: DEBUG nova.network.neutron [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.258234] env[61905]: DEBUG nova.objects.instance [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'info_cache' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.458304] env[61905]: DEBUG nova.network.neutron [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [{"id": "799ace69-512b-4a5b-bc68-bb41890393e7", "address": "fa:16:3e:47:fb:4a", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap799ace69-51", "ovs_interfaceid": "799ace69-512b-4a5b-bc68-bb41890393e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.561914] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362883, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.685516] env[61905]: INFO nova.compute.manager [-] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Took 1.67 seconds to deallocate network for instance. [ 1002.762105] env[61905]: DEBUG nova.objects.base [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Object Instance<7b0db0a2-c990-4160-9be8-018239425114> lazy-loaded attributes: flavor,info_cache {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1002.874345] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1002.898244] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.898518] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.898675] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.898855] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.899008] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.899169] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.899377] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.899539] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.899713] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.899877] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.900067] env[61905]: DEBUG nova.virt.hardware [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.901217] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8e6cfe-61cf-4879-af04-1cd9943137cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.909017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330054bc-3280-4b8f-84c9-a3502b589df3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.960944] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.961251] env[61905]: DEBUG nova.objects.instance [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lazy-loading 'migration_context' on Instance uuid 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.013769] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "fb417a53-b6df-4566-87f2-bd56dafd789c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.014150] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.014248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.014521] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.014708] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.016973] env[61905]: INFO nova.compute.manager [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Terminating instance [ 1003.021936] env[61905]: DEBUG nova.compute.manager [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1003.022228] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.023202] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97de68a-9dd5-484b-b7ac-9d7f17372d7c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.032544] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.032544] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b54ae301-3114-4145-a9e3-5dc7ebd428ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.066802] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362883, 'name': ReconfigVM_Task, 'duration_secs': 1.052198} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.067123] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa/15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.069397] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79d045d4-ae19-428c-b326-05b3631134ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.075858] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1003.075858] env[61905]: value = "task-1362885" [ 1003.075858] env[61905]: _type = "Task" [ 1003.075858] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.084366] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362885, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.094195] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.094195] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.094329] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleting the datastore file [datastore1] fb417a53-b6df-4566-87f2-bd56dafd789c {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.094593] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e88cfd4-ce1e-4a60-95f1-2ebf94ae8251 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.101340] env[61905]: DEBUG oslo_vmware.api [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1003.101340] env[61905]: value = "task-1362886" [ 1003.101340] env[61905]: _type = "Task" [ 1003.101340] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.109598] env[61905]: DEBUG oslo_vmware.api [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362886, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.192194] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.192566] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.192840] env[61905]: DEBUG nova.objects.instance [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lazy-loading 'resources' on Instance uuid 1502df44-9166-4ce8-9117-a57e7be2d299 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.456444] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Successfully updated port: 2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.464191] env[61905]: DEBUG nova.objects.base [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Object Instance<020dc38a-d4ea-41fa-a3aa-3eb63b3516d9> lazy-loaded attributes: info_cache,migration_context {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1003.465095] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33f5bc3-df6f-42e4-b045-fda67032143c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.486872] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68285971-5c8e-442b-b59e-96eb96d30b40 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.492608] env[61905]: DEBUG oslo_vmware.api [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1003.492608] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ac0c87-e42e-a3c5-43d9-4f2178001fc5" [ 1003.492608] env[61905]: _type = "Task" [ 1003.492608] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.500701] env[61905]: DEBUG oslo_vmware.api [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52ac0c87-e42e-a3c5-43d9-4f2178001fc5, 'name': SearchDatastore_Task} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.500953] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.550826] env[61905]: DEBUG nova.network.neutron [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [{"id": "55782888-9c3d-4f40-852f-9cff30eb514b", "address": "fa:16:3e:6e:f1:7a", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55782888-9c", "ovs_interfaceid": "55782888-9c3d-4f40-852f-9cff30eb514b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.585845] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362885, 'name': Rename_Task, 'duration_secs': 0.156558} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.586416] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.586416] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cde9ff3-a787-4253-a8f4-37e4dbebe512 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.594211] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1003.594211] env[61905]: value = "task-1362887" [ 1003.594211] env[61905]: _type = "Task" [ 1003.594211] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.601953] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.608612] env[61905]: DEBUG oslo_vmware.api [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210157} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.608923] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.609126] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.609310] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.609486] env[61905]: INFO nova.compute.manager [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Took 0.59 seconds to destroy the instance on the hypervisor. 
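The entries above trace the vmwareapi destroy path end to end: UnregisterVM, DeleteDatastoreFile_Task, "Instance destroyed", with every vCenter task polled to completion through wait_for_task/_poll_task. Below is a minimal sketch of that task-wait pattern using oslo.vmware; the host, credentials, and datastore path are illustrative placeholders, not values from this log, and a real call would pass an actual Datacenter managed-object reference.

    # Sketch only: drive one vCenter task the way the wait_for_task /
    # _poll_task lines above do.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',  # placeholder endpoint/creds
        api_retry_count=10,
        task_poll_interval=0.5)  # interval behind the "progress is N%" polls

    file_manager = session.vim.service_content.fileManager
    # DeleteDatastoreFile_Task returns a task reference immediately...
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore1] some-instance-uuid',
                              datacenter=None)  # real code passes a
                                                # Datacenter moref here
    # ...and wait_for_task() blocks, polling until success or raising
    # on task error.
    session.wait_for_task(task)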
[ 1003.609709] env[61905]: DEBUG oslo.service.loopingcall [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.609897] env[61905]: DEBUG nova.compute.manager [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1003.609988] env[61905]: DEBUG nova.network.neutron [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.781011] env[61905]: DEBUG nova.compute.manager [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Received event network-vif-plugged-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.781237] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Acquiring lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.781653] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.781885] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.782129] env[61905]: DEBUG nova.compute.manager [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] No waiting events found dispatching network-vif-plugged-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.782432] env[61905]: WARNING nova.compute.manager [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Received unexpected event network-vif-plugged-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e for instance with vm_state building and task_state spawning. 
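The "-events" lock triplets above (Acquiring / acquired / released around _pop_event) come from oslo.concurrency's named in-process locks; the waited/held timings are logged by the lock wrapper itself. A minimal sketch of the same primitive follows, with illustrative lock names rather than anything specific to this run:

    # Sketch of the oslo.concurrency pattern behind the
    # Acquiring / acquired / released triplets above.
    from oslo_concurrency import lockutils

    # Context-manager form: serializes access to shared state (here,
    # a per-instance event dict) under a named lock.
    with lockutils.lock('some-instance-uuid-events'):
        pass  # pop or record an event while holding the lock

    # Decorator form: every caller sharing the lock name is
    # serialized, and acquire/release are logged by the wrapper.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass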
[ 1003.782555] env[61905]: DEBUG nova.compute.manager [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Received event network-changed-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1003.782759] env[61905]: DEBUG nova.compute.manager [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Refreshing instance network info cache due to event network-changed-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1003.783051] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Acquiring lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.783196] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Acquired lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.783405] env[61905]: DEBUG nova.network.neutron [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Refreshing network info cache for port 2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.833832] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8641f006-21f8-4f5b-8eb3-38af6575aba0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.843481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d1ed42-9d01-4744-9f99-197fc931f088 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.879755] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a013b4-c114-4b2f-8d81-21b3fd4e6f29 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.888942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c54016c-b8b1-47ec-b092-18ab4979bc89 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.902221] env[61905]: DEBUG nova.compute.provider_tree [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.959531] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.054538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "refresh_cache-7b0db0a2-c990-4160-9be8-018239425114" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.104043] env[61905]: DEBUG oslo_vmware.api [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362887, 'name': PowerOnVM_Task, 'duration_secs': 0.496941} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.104278] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.104383] env[61905]: INFO nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Took 8.02 seconds to spawn the instance on the hypervisor. [ 1004.104795] env[61905]: DEBUG nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1004.106043] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6d11ba-73ae-47a7-ae14-27efa2d07a7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.318513] env[61905]: DEBUG nova.network.neutron [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1004.386301] env[61905]: DEBUG nova.network.neutron [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.404856] env[61905]: DEBUG nova.scheduler.client.report [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.557222] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.557563] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2bef45c-6ed3-4cc9-a038-fe3ccf27e24c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.564664] env[61905]: DEBUG oslo_vmware.api [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1004.564664] env[61905]: value = "task-1362888" [ 1004.564664] env[61905]: _type = "Task" [ 1004.564664] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.578182] env[61905]: DEBUG nova.network.neutron [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.579329] env[61905]: DEBUG oslo_vmware.api [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.625937] env[61905]: INFO nova.compute.manager [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Took 12.81 seconds to build instance. 
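
The PowerOnVM_Task entries above trace the generic task-handling pattern these logs repeat constantly: a vSphere SOAP call returns a task reference immediately, and the caller polls it ("Waiting for the task ... to complete", "progress is 0%", then "completed successfully" with a duration_secs) until vCenter reports a terminal state. Below is a minimal sketch of that poll loop, under stated assumptions: the session object and its get_task_info helper are hypothetical stand-ins, not the real oslo_vmware.api implementation.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real interval is configurable

    def wait_for_task(session, task_ref, timeout=300.0):
        """Poll a vCenter task reference until it reaches a terminal state."""
        start = time.monotonic()
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == "success":             # maps to "completed successfully"
                return info.result
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"task {task_ref} still running after {timeout}s")
            time.sleep(POLL_INTERVAL)  # each iteration is one "progress is N%" line

Each "Task: {'id': task-NNNNNNN, 'name': ...} progress is N%" line in the log corresponds to one such poll iteration; the duration_secs reported on completion is simply the elapsed time between task creation and the poll that observed the terminal state.
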
[ 1004.888644] env[61905]: DEBUG oslo_concurrency.lockutils [req-1407d888-6375-43c9-be21-d71efa5dee72 req-a1bb5d3e-35fd-4585-8346-4f29243b5032 service nova] Releasing lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.889057] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.889214] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1004.909542] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.911752] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.411s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.934798] env[61905]: INFO nova.scheduler.client.report [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Deleted allocations for instance 1502df44-9166-4ce8-9117-a57e7be2d299 [ 1005.075678] env[61905]: DEBUG oslo_vmware.api [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362888, 'name': PowerOnVM_Task, 'duration_secs': 0.418857} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.075971] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.076168] env[61905]: DEBUG nova.compute.manager [None req-49fd9abe-66b4-4210-9f96-01e90369ea1e tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.076951] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89dc707-20f8-4d5c-8c3a-0a60a80dbf2b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.079995] env[61905]: INFO nova.compute.manager [-] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Took 1.47 seconds to deallocate network for instance. [ 1005.128255] env[61905]: DEBUG oslo_concurrency.lockutils [None req-375cfc8f-9f32-4f93-8732-e460a5df40c8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.908s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.442547] env[61905]: DEBUG oslo_concurrency.lockutils [None req-595c2715-93b4-446e-ba89-f9d5e11ae83f tempest-AttachVolumeShelveTestJSON-1351430112 tempest-AttachVolumeShelveTestJSON-1351430112-project-member] Lock "1502df44-9166-4ce8-9117-a57e7be2d299" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.565s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.451197] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1005.509624] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f1760a-a66d-4991-a4eb-112af1a12b84 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.517296] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3100164d-bb2a-47c1-bfb1-96028d322a3a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.550392] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da437be-7eca-4035-96e1-58dd29f512cc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.557614] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed596e1-6e94-4ace-b8c8-a831ffa05ce0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.570691] env[61905]: DEBUG nova.compute.provider_tree [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.591090] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.698615] env[61905]: DEBUG nova.network.neutron [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Updating instance_info_cache with network_info: [{"id": "2f77e83c-7f69-45c3-bbef-b3273f2f1a5e", "address": "fa:16:3e:f3:7f:5a", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77e83c-7f", "ovs_interfaceid": "2f77e83c-7f69-45c3-bbef-b3273f2f1a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.807317] env[61905]: 
DEBUG nova.compute.manager [req-0588d05b-ee0b-45c2-8448-61a9982cc2b9 req-1a6645b9-6235-41d7-9f9f-a82c35bfbd7c service nova] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Received event network-vif-deleted-8b60d95f-e546-4b36-9a3a-8b44e660aa57 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1006.073986] env[61905]: DEBUG nova.scheduler.client.report [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1006.201248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "refresh_cache-7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.201893] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance network_info: |[{"id": "2f77e83c-7f69-45c3-bbef-b3273f2f1a5e", "address": "fa:16:3e:f3:7f:5a", "network": {"id": "3b36df6b-c469-4d18-82aa-dc089c91a852", "bridge": "br-int", "label": "tempest-ServersTestJSON-988745219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "82f1c8d91a7b4119bb32c82ef7bd940f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2f77e83c-7f", "ovs_interfaceid": "2f77e83c-7f69-45c3-bbef-b3273f2f1a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1006.202267] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:7f:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2f77e83c-7f69-45c3-bbef-b3273f2f1a5e', 'vif_model': 'vmxnet3'}] {{(pid=61905) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.209732] env[61905]: DEBUG oslo.service.loopingcall [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.209959] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.210201] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0870e6e-6ac6-42c0-9ca2-d8dae60fba46 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.232586] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.232586] env[61905]: value = "task-1362889" [ 1006.232586] env[61905]: _type = "Task" [ 1006.232586] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.242076] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362889, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.742246] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362889, 'name': CreateVM_Task, 'duration_secs': 0.337475} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.742615] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1006.743065] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.743250] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.743617] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1006.744195] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a35d4eb-1c34-4f4b-b78c-eb65378b2703 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.748708] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1006.748708] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d38e1b-64c9-76f7-5fc3-a47678bd0d70" [ 1006.748708] env[61905]: _type = "Task" [ 1006.748708] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.756052] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d38e1b-64c9-76f7-5fc3-a47678bd0d70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.086045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.174s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.088934] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.498s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.089177] env[61905]: DEBUG nova.objects.instance [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'resources' on Instance uuid fb417a53-b6df-4566-87f2-bd56dafd789c {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.261243] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d38e1b-64c9-76f7-5fc3-a47678bd0d70, 'name': SearchDatastore_Task, 'duration_secs': 0.009333} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.261474] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.261710] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.261944] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.262108] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.262291] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.262554] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e5cfa1b4-e36d-471a-bc49-5a7e903f3c94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.271833] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.272055] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.272744] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-622322c2-962e-4034-9651-a7483f4f6001 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.278079] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1007.278079] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e967f-5066-c6a4-e473-065667efdafd" [ 1007.278079] env[61905]: _type = "Task" [ 1007.278079] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.285413] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e967f-5066-c6a4-e473-065667efdafd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.653016] env[61905]: INFO nova.scheduler.client.report [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocation for migration 1a72af17-8baa-4c03-a33e-60d9cd0218a9 [ 1007.695017] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432aef7b-59b5-4ee2-b88a-82d069921a0a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.702758] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea4d0a-e737-4143-b906-810cde74ffb7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.732931] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1039b55-c31a-4e4a-91a0-fbef4104bf8d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.740517] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c86f9fa-a536-4aae-aebe-3c53867cb58b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.753544] env[61905]: DEBUG nova.compute.provider_tree [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.788420] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e967f-5066-c6a4-e473-065667efdafd, 'name': SearchDatastore_Task, 'duration_secs': 0.010551} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.789119] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f61eea6-6c67-4dc3-99dc-bfb89b85e73e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.794056] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1007.794056] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5257ee20-4ba9-ca80-666a-87d474c5b6f3" [ 1007.794056] env[61905]: _type = "Task" [ 1007.794056] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.801889] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5257ee20-4ba9-ca80-666a-87d474c5b6f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.837846] env[61905]: DEBUG nova.compute.manager [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Received event network-changed-560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.838071] env[61905]: DEBUG nova.compute.manager [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Refreshing instance network info cache due to event network-changed-560cc744-85c9-41e0-9cca-2eccf7ca8535. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1007.838295] env[61905]: DEBUG oslo_concurrency.lockutils [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] Acquiring lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.838443] env[61905]: DEBUG oslo_concurrency.lockutils [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] Acquired lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.838639] env[61905]: DEBUG nova.network.neutron [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Refreshing network info cache for port 560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.160636] env[61905]: DEBUG oslo_concurrency.lockutils [None req-91ef21da-cb0c-4910-b74e-a0a5e1d5e62b tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.483s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.256960] env[61905]: DEBUG nova.scheduler.client.report [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.307170] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5257ee20-4ba9-ca80-666a-87d474c5b6f3, 'name': SearchDatastore_Task, 'duration_secs': 0.026032} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.307170] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.307170] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20/7e57b01e-3c79-4c6d-8e1a-983e2fb0df20.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.307170] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c789b62b-23e6-4641-9379-78b890d72cae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.315018] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1008.315018] env[61905]: value = "task-1362890" [ 1008.315018] env[61905]: _type = "Task" [ 1008.315018] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.322297] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.763559] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.828980] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362890, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.831885] env[61905]: INFO nova.scheduler.client.report [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance fb417a53-b6df-4566-87f2-bd56dafd789c [ 1008.866295] env[61905]: DEBUG nova.network.neutron [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updated VIF entry in instance network info cache for port 560cc744-85c9-41e0-9cca-2eccf7ca8535. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.866614] env[61905]: DEBUG nova.network.neutron [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updating instance_info_cache with network_info: [{"id": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "address": "fa:16:3e:d7:9a:ba", "network": {"id": "3d117fab-65b4-4a5b-9b3c-c4091cf6f24c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-335702686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f90126694e884e2dbc7f57b7adb96afa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap560cc744-85", "ovs_interfaceid": "560cc744-85c9-41e0-9cca-2eccf7ca8535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.917466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.917466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.917656] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.917755] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.917954] 
env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.922084] env[61905]: INFO nova.compute.manager [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Terminating instance [ 1008.924610] env[61905]: DEBUG nova.compute.manager [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.925015] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.925980] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e70aacd-c302-4dc3-8bf2-c1cdb063dd66 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.936037] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.937127] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bafab560-472a-4a87-b1db-8906b85a2cb0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.944778] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1008.944778] env[61905]: value = "task-1362891" [ 1008.944778] env[61905]: _type = "Task" [ 1008.944778] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.956284] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.324443] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.853229} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.324759] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20/7e57b01e-3c79-4c6d-8e1a-983e2fb0df20.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1009.325050] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1009.325343] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd50f534-8f98-4a06-bc7e-5947d034dd25 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.332158] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1009.332158] env[61905]: value = "task-1362892" [ 1009.332158] env[61905]: _type = "Task" [ 1009.332158] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.342107] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362892, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.343276] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3e264d01-ea36-4077-b2da-4df11a1fad7a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "fb417a53-b6df-4566-87f2-bd56dafd789c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.329s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.368876] env[61905]: DEBUG oslo_concurrency.lockutils [req-172ccce8-94f2-42b4-9b1c-0c6009186f4d req-42ff588c-5114-4097-93db-ce3645425de2 service nova] Releasing lock "refresh_cache-15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.412358] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.412950] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.456782] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362891, 'name': PowerOffVM_Task, 'duration_secs': 0.45124} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.456782] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.457217] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.457322] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d43948f-b7c7-4bad-978e-a33df46b8116 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.616432] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.616997] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.617246] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleting the datastore file [datastore2] 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.617510] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d9634da-8e4c-4c40-a764-5171c0a59813 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.623686] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1009.623686] env[61905]: value = "task-1362894" [ 1009.623686] env[61905]: _type = "Task" [ 1009.623686] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.631057] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.841901] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071994} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.842323] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1009.842943] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50137b83-9fe8-443c-a458-1dd01b1730bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.864264] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20/7e57b01e-3c79-4c6d-8e1a-983e2fb0df20.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.864531] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37c88f61-8573-43ec-8c0a-4284a3396eb2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.883470] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1009.883470] env[61905]: value = "task-1362895" [ 1009.883470] env[61905]: _type = "Task" [ 1009.883470] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.893378] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362895, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.915982] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1010.134153] env[61905]: DEBUG oslo_vmware.api [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439434} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.134399] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.134647] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.134886] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.135125] env[61905]: INFO nova.compute.manager [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1010.135418] env[61905]: DEBUG oslo.service.loopingcall [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.135666] env[61905]: DEBUG nova.compute.manager [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1010.135775] env[61905]: DEBUG nova.network.neutron [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1010.284331] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.284608] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.393268] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362895, 'name': ReconfigVM_Task, 'duration_secs': 0.320975} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.394146] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20/7e57b01e-3c79-4c6d-8e1a-983e2fb0df20.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.395022] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba4c7f0a-3fe5-4ead-81d3-ed8076eb68bc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.402220] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1010.402220] env[61905]: value = "task-1362896" [ 1010.402220] env[61905]: _type = "Task" [ 1010.402220] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.407371] env[61905]: DEBUG nova.compute.manager [req-c0cccf88-6c87-46c4-a4a4-d241dc955dc0 req-75d1113f-9a1d-4467-b9ab-42954c17adeb service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Received event network-vif-deleted-799ace69-512b-4a5b-bc68-bb41890393e7 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.407551] env[61905]: INFO nova.compute.manager [req-c0cccf88-6c87-46c4-a4a4-d241dc955dc0 req-75d1113f-9a1d-4467-b9ab-42954c17adeb service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Neutron deleted interface 799ace69-512b-4a5b-bc68-bb41890393e7; detaching it from the instance and deleting it from the info cache [ 1010.407723] env[61905]: DEBUG nova.network.neutron [req-c0cccf88-6c87-46c4-a4a4-d241dc955dc0 req-75d1113f-9a1d-4467-b9ab-42954c17adeb service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.416231] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362896, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.438183] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.438452] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.442367] env[61905]: INFO nova.compute.claims [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1010.787023] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1010.888196] env[61905]: DEBUG nova.network.neutron [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.912424] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362896, 'name': Rename_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.912683] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4c073d7-7340-4f1a-9cfb-dd79ea3f3636 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.921678] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ea3f9b-2de8-4d0d-9e36-7a08ee193df1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.949912] env[61905]: DEBUG nova.compute.manager [req-c0cccf88-6c87-46c4-a4a4-d241dc955dc0 req-75d1113f-9a1d-4467-b9ab-42954c17adeb service nova] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Detach interface failed, port_id=799ace69-512b-4a5b-bc68-bb41890393e7, reason: Instance 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1011.309339] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.391197] env[61905]: INFO nova.compute.manager [-] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Took 1.26 seconds to deallocate network for instance. [ 1011.411609] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362896, 'name': Rename_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.538875] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb81e2d-fb4b-4aad-a607-3c313532e56e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.546736] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d666581-26ed-4d54-aa6f-f182e949ca0a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.578489] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4353942-47c5-455b-b973-ce988103e30e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.586014] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122d6c83-ebf8-40c5-94ad-46a40ad10998 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.598991] env[61905]: DEBUG nova.compute.provider_tree [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.897991] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.915162] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362896, 'name': Rename_Task, 'duration_secs': 1.155954} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.915500] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.915840] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a54c523b-21af-4e4e-bd38-1858d13efcb7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.922911] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1011.922911] env[61905]: value = "task-1362897" [ 1011.922911] env[61905]: _type = "Task" [ 1011.922911] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.939385] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362897, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.101904] env[61905]: DEBUG nova.scheduler.client.report [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1012.433462] env[61905]: DEBUG oslo_vmware.api [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362897, 'name': PowerOnVM_Task, 'duration_secs': 0.452686} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.433769] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.433980] env[61905]: INFO nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Took 9.56 seconds to spawn the instance on the hypervisor. 
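[annotation, not part of the captured log] The inventory report in the record above carries every number the scheduler works with. As an illustrative sketch only (plain Python; the helper name is invented, this is not Nova or Placement source), the schedulable capacity per resource class follows from (total - reserved) * allocation_ratio:

# Illustrative only: derive the effective capacity Placement exposes
# from the inventory dict logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # capacity = (total - reserved) * allocation_ratio, per resource class
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs on this node
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

The 4.0 VCPU overcommit is why claims for these 1-vCPU m1.nano instances keep succeeding on a 48-core node throughout this log.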
[ 1012.434236] env[61905]: DEBUG nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1012.435084] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bd3f54-b17c-4a68-96d2-18bf2fec4003 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.606804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.607316] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1012.609821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.301s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.611209] env[61905]: INFO nova.compute.claims [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.952860] env[61905]: INFO nova.compute.manager [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Took 14.94 seconds to build instance. [ 1013.116255] env[61905]: DEBUG nova.compute.utils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1013.119584] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1013.119753] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1013.169566] env[61905]: DEBUG nova.policy [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '947946764fc64847946057d867de54bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '980cc259c0254e84989e0cfc0e45837f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1013.412745] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Successfully created port: 7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.454865] env[61905]: DEBUG oslo_concurrency.lockutils [None req-39c6d857-7fbd-492c-88f6-d5a3d1f619dc tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.450s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.620096] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Start building block device mappings for instance.
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1013.725165] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502da468-4988-4b78-b4e7-8b4d52013a17 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.733160] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70d10ec-daa1-4493-b547-690fd3690cb8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.762301] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d766a1-6bad-4932-9c9c-258b6653775b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.769595] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d43012-2637-461d-802b-35126a01d18f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.782479] env[61905]: DEBUG nova.compute.provider_tree [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.067323] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.067641] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.067842] env[61905]: DEBUG nova.compute.manager [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1014.069249] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3347d94-6f33-41e4-aadf-57c769e1e49a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.075746] env[61905]: DEBUG nova.compute.manager [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1014.076368] env[61905]: DEBUG nova.objects.instance [None
req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'flavor' on Instance uuid 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.286666] env[61905]: DEBUG nova.scheduler.client.report [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.580946] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.581221] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d8f54a6-ec69-4202-a7fd-78093f74fb88 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.588609] env[61905]: DEBUG oslo_vmware.api [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1014.588609] env[61905]: value = "task-1362898" [ 1014.588609] env[61905]: _type = "Task" [ 1014.588609] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.596652] env[61905]: DEBUG oslo_vmware.api [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.632972] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1014.657305] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1014.657554] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1014.657712] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.657893] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1014.658050] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.658223] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1014.658441] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1014.658659] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1014.658845] env[61905]: DEBUG
nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1014.659015] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1014.659191] env[61905]: DEBUG nova.virt.hardware [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1014.660041] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5b54da-bd6b-4710-8b65-42a8d7c90ef2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.667868] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7402ca-9014-4286-a541-effcc8796b9b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.793056] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.183s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.794140] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1014.797062] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.899s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.797419] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.819925] env[61905]: INFO nova.scheduler.client.report [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocations for instance 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9 [ 1014.935965] env[61905]: DEBUG nova.compute.manager [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Received event network-vif-plugged-7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.936177] env[61905]: DEBUG oslo_concurrency.lockutils [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] Acquiring lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.936392] env[61905]: DEBUG oslo_concurrency.lockutils [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.936566] env[61905]: DEBUG oslo_concurrency.lockutils [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.936736] env[61905]: DEBUG nova.compute.manager [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] No waiting events found dispatching network-vif-plugged-7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1014.937065] env[61905]: WARNING nova.compute.manager [req-d9033380-7b52-4f24-a57d-66cefb22117d req-7677d851-6846-4eb2-8d77-fb3a106d5e87 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Received unexpected event network-vif-plugged-7f522468-91eb-47a0-9c3f-0a774adf4dbb for instance with vm_state building and task_state spawning.
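[annotation, not part of the captured log] The nova.virt.hardware records a few entries above trace the CPU topology search for the m1.nano flavor. A toy reconstruction (plain Python, not the real code in nova/virt/hardware.py; names and field order here are illustrative) reproduces the logged result for 1 vCPU under 65536/65536/65536 limits:

# Toy sketch of the topology search the log traces: enumerate the
# sockets*cores*threads factorisations of the vCPU count that fit
# within the limits.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    return [VirtCPUTopology(s, c, t)
            for s in range(1, min(vcpus, max_sockets) + 1)
            for c in range(1, min(vcpus, max_cores) + 1)
            for t in range(1, min(vcpus, max_threads) + 1)
            if s * c * t == vcpus]

print(possible_topologies(1))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
# i.e. exactly the single 1:1:1 topology reported above for this flavor.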
[ 1015.022485] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Successfully updated port: 7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.099264] env[61905]: DEBUG oslo_vmware.api [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362898, 'name': PowerOffVM_Task, 'duration_secs': 0.196396} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.099616] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1015.099695] env[61905]: DEBUG nova.compute.manager [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1015.100440] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6654db86-10e6-4ae5-ae4b-62c007fbab2e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.293396] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.293653] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.302411] env[61905]: DEBUG nova.compute.utils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.304127] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Allocating IP information in the background.
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1015.304263] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1015.327231] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f8a9ab4-8b62-4842-b955-8d0a180295a1 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "020dc38a-d4ea-41fa-a3aa-3eb63b3516d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.410s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.354179] env[61905]: DEBUG nova.policy [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f27bcad7ab3b4e0e98065f24300f9425', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30edd7bc94ee492cb7f4e4f388e45b8b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1015.524873] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.525046] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.525202] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.587741] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Successfully created port: 27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.612825] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7f70a8e7-1b3d-4e35-a2a2-5b708b654cee tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.545s {{(pid=61905) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.796473] env[61905]: DEBUG nova.compute.utils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.807311] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1016.063505] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.192818] env[61905]: DEBUG nova.network.neutron [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Updating instance_info_cache with network_info: [{"id": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "address": "fa:16:3e:f8:bb:ee", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f522468-91", "ovs_interfaceid": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.299945] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.695760] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.696322] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Instance network_info: |[{"id": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "address": "fa:16:3e:f8:bb:ee", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f522468-91", "ovs_interfaceid": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1016.696818] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:bb:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ce62383-8e84-4e26-955b-74c11392f4c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f522468-91eb-47a0-9c3f-0a774adf4dbb', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.704698] env[61905]: DEBUG oslo.service.loopingcall [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.704899] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.705152] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e2056c1-9206-485c-97c5-7e9b6ac29586 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.727336] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.727336] env[61905]: value = "task-1362899" [ 1016.727336] env[61905]: _type = "Task" [ 1016.727336] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.738607] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362899, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.816104] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1016.844758] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1016.845306] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1016.845306] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.845537] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1016.845665] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.845761] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1016.846258] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777
tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1016.846258] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1016.846390] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1016.846812] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1016.846812] env[61905]: DEBUG nova.virt.hardware [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1016.847672] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f613af-ece5-4e08-9b5d-4177fe45b0ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.855953] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f866d66-ef3e-4020-9700-01b7a1f830d5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.870239] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.870521] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.870861] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.870976] env[61905]: DEBUG oslo_concurrency.lockutils
[None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.871195] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.876926] env[61905]: INFO nova.compute.manager [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Terminating instance [ 1016.878465] env[61905]: DEBUG nova.compute.manager [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1016.878632] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.879483] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6fe612-5239-49af-8a38-10cabf733521 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.888343] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.888598] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcb55880-44fe-45aa-a4b0-bf1d14173311 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.961535] env[61905]: DEBUG nova.compute.manager [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Received event network-changed-7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1016.961794] env[61905]: DEBUG nova.compute.manager [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Refreshing instance network info cache due to event network-changed-7f522468-91eb-47a0-9c3f-0a774adf4dbb.
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1016.962837] env[61905]: DEBUG oslo_concurrency.lockutils [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] Acquiring lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.962837] env[61905]: DEBUG oslo_concurrency.lockutils [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] Acquired lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.962837] env[61905]: DEBUG nova.network.neutron [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Refreshing network info cache for port 7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.064533] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Successfully updated port: 27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.066938] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.067148] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.067468] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleting the datastore file [datastore2] 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.068382] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42a26d34-23a0-4f3c-b37c-63191e028738 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.074665] env[61905]: DEBUG oslo_vmware.api [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for the task: (returnval){ [ 1017.074665] env[61905]: value = "task-1362901" [ 1017.074665] env[61905]: _type = "Task" [ 1017.074665] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.083662] env[61905]: DEBUG oslo_vmware.api [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362901, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.236734] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362899, 'name': CreateVM_Task, 'duration_secs': 0.323185} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.237090] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.237608] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.237777] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.238115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1017.238373] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55d20c02-bb1e-4ae5-8b46-22ad112abc2c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.242591] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1017.242591] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e28131-d3b7-e212-b4c0-f751a0735257" [ 1017.242591] env[61905]: _type = "Task" [ 1017.242591] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.249782] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e28131-d3b7-e212-b4c0-f751a0735257, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.378790] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.379081] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.379326] env[61905]: INFO nova.compute.manager [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Attaching volume 176d041a-4619-4a02-8a96-6f7ebfd757ae to /dev/sdb [ 1017.413488] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a166a25e-f252-40e4-acaa-bd876faef707 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.420327] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f72a6c6-b256-443e-afbc-d96be99e2cbd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.433171] env[61905]: DEBUG nova.virt.block_device [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating existing volume attachment record: ab2811bf-7e47-4ff8-a099-6314ca113c44 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1017.568199] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.568358] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.568503] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.585912] env[61905]: DEBUG oslo_vmware.api [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 
tempest-ServersTestJSON-1418293372-project-member] Task: {'id': task-1362901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127238} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.586767] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.586982] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.587183] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.587360] env[61905]: INFO nova.compute.manager [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Took 0.71 seconds to destroy the instance on the hypervisor. [ 1017.587603] env[61905]: DEBUG oslo.service.loopingcall [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.587815] env[61905]: DEBUG nova.compute.manager [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1017.587914] env[61905]: DEBUG nova.network.neutron [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.753240] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52e28131-d3b7-e212-b4c0-f751a0735257, 'name': SearchDatastore_Task, 'duration_secs': 0.010209} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.753541] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.753815] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.754101] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.754286] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.754498] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.754781] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8363b19-77ce-458e-8cdd-7b4348d0fd8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.763128] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.763397] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.764139] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74caa57f-3e06-4a1b-8129-a5d8956f5dc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.769385] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1017.769385] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5287d5f9-7888-82aa-0b12-c60ed14787c3" [ 1017.769385] env[61905]: _type = "Task" [ 1017.769385] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.773282] env[61905]: DEBUG nova.network.neutron [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Updated VIF entry in instance network info cache for port 7f522468-91eb-47a0-9c3f-0a774adf4dbb. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.773662] env[61905]: DEBUG nova.network.neutron [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Updating instance_info_cache with network_info: [{"id": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "address": "fa:16:3e:f8:bb:ee", "network": {"id": "ac8703f0-cb36-4e56-ad5b-5db211cc6f00", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-324863126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "980cc259c0254e84989e0cfc0e45837f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ce62383-8e84-4e26-955b-74c11392f4c9", "external-id": "nsx-vlan-transportzone-215", "segmentation_id": 215, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f522468-91", "ovs_interfaceid": "7f522468-91eb-47a0-9c3f-0a774adf4dbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.782772] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5287d5f9-7888-82aa-0b12-c60ed14787c3, 'name': SearchDatastore_Task, 'duration_secs': 0.007326} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.783413] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d2148cf-a8b0-49b9-8f56-8c52e8809155 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.788831] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1017.788831] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fefb15-9c82-4456-3943-e8a7737468f4" [ 1017.788831] env[61905]: _type = "Task" [ 1017.788831] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.796802] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fefb15-9c82-4456-3943-e8a7737468f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.102321] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1018.223263] env[61905]: DEBUG nova.network.neutron [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.276647] env[61905]: DEBUG oslo_concurrency.lockutils [req-b8fe1813-a9ab-48b4-a1bd-b62adf226c4e req-4f97b735-23aa-40ca-96ce-f34a202429b8 service nova] Releasing lock "refresh_cache-817117bb-1728-42a1-ac2a-6ba284c65fa3" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.302048] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fefb15-9c82-4456-3943-e8a7737468f4, 'name': SearchDatastore_Task, 'duration_secs': 0.008291} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.302048] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.302048] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 817117bb-1728-42a1-ac2a-6ba284c65fa3/817117bb-1728-42a1-ac2a-6ba284c65fa3.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.302048] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a728add1-0814-4267-a2a3-ea708bfe6331 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.309286] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1018.309286] env[61905]: value = "task-1362905" [ 1018.309286] env[61905]: _type = "Task" [ 1018.309286] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.316826] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362905, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.389334] env[61905]: DEBUG nova.network.neutron [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.725490] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.725934] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Instance network_info: |[{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1018.726568] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:46:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27393fae-ed78-4714-bde6-c887048264a3', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.736142] env[61905]: DEBUG oslo.service.loopingcall [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.736522] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.736647] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74e8b08a-1287-403d-befd-0ad7f60d28e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.757804] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.757804] env[61905]: value = "task-1362906" [ 1018.757804] env[61905]: _type = "Task" [ 1018.757804] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.765750] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362906, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.821088] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505554} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.821360] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 817117bb-1728-42a1-ac2a-6ba284c65fa3/817117bb-1728-42a1-ac2a-6ba284c65fa3.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1018.821582] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1018.821921] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2e20986-c853-4fbc-90e6-e3e4df8ea925 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.833064] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1018.833064] env[61905]: value = "task-1362907" [ 1018.833064] env[61905]: _type = "Task" [ 1018.833064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.841391] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362907, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.892422] env[61905]: INFO nova.compute.manager [-] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Took 1.30 seconds to deallocate network for instance. [ 1018.990508] env[61905]: DEBUG nova.compute.manager [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Received event network-vif-plugged-27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.990794] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.991025] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.991216] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.991389] env[61905]: DEBUG nova.compute.manager [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] No waiting events found dispatching network-vif-plugged-27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1018.991559] env[61905]: WARNING nova.compute.manager [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Received unexpected event network-vif-plugged-27393fae-ed78-4714-bde6-c887048264a3 for instance with vm_state building and task_state spawning. [ 1018.991758] env[61905]: DEBUG nova.compute.manager [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Received event network-changed-27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1018.991924] env[61905]: DEBUG nova.compute.manager [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Refreshing instance network info cache due to event network-changed-27393fae-ed78-4714-bde6-c887048264a3. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1018.992191] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Acquiring lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.992379] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Acquired lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.992553] env[61905]: DEBUG nova.network.neutron [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Refreshing network info cache for port 27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1019.268096] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362906, 'name': CreateVM_Task, 'duration_secs': 0.31636} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.268247] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.268913] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.269107] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.269421] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1019.269667] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49a4e5a7-5cfe-484b-b0cc-9bb514100b00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.274519] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1019.274519] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52952e8b-b73c-b1a9-1e5c-530518b4d115" [ 1019.274519] env[61905]: _type = "Task" [ 1019.274519] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.282028] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52952e8b-b73c-b1a9-1e5c-530518b4d115, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.342441] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065729} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.342710] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.343485] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c3f5c8-d3bd-4b33-8507-cf8039f7a882 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.365733] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 817117bb-1728-42a1-ac2a-6ba284c65fa3/817117bb-1728-42a1-ac2a-6ba284c65fa3.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.366034] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32f423fc-f750-43b9-bbc9-83311bbddb6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.385558] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1019.385558] env[61905]: value = "task-1362908" [ 1019.385558] env[61905]: _type = "Task" [ 1019.385558] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.394999] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362908, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.399086] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.399343] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.399558] env[61905]: DEBUG nova.objects.instance [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lazy-loading 'resources' on Instance uuid 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.707630] env[61905]: DEBUG nova.network.neutron [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updated VIF entry in instance network info cache for port 27393fae-ed78-4714-bde6-c887048264a3. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1019.708063] env[61905]: DEBUG nova.network.neutron [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.786466] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52952e8b-b73c-b1a9-1e5c-530518b4d115, 'name': SearchDatastore_Task, 'duration_secs': 0.009769} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.786600] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.786853] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.787130] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.787299] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.787500] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.787772] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87006c0b-8baf-41f2-a634-1916cb3acac9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.796757] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.796991] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.797705] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6a9fa02-f5aa-4f30-a7e9-9c692907df7d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.803007] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1019.803007] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5249fe68-d45c-3d8b-3186-de1b25fab7e5" [ 1019.803007] env[61905]: _type = "Task" [ 1019.803007] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.810436] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5249fe68-d45c-3d8b-3186-de1b25fab7e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.896018] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.003886] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd314e95-699e-4d47-9116-d2225731b20c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.011503] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5450c9c4-1931-48d8-9e64-372f09c01d7c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.041630] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb0190b-5c07-4686-951d-31555009f75b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.048825] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfed334-24b3-43f2-a788-ee1b01ae3e3b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.061728] env[61905]: DEBUG nova.compute.provider_tree [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.210877] env[61905]: DEBUG oslo_concurrency.lockutils [req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] Releasing lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.211165] env[61905]: DEBUG nova.compute.manager 
[req-eff7bea4-04f4-4ca4-8c2e-c0ec6cf77715 req-ea4998bb-8006-47bd-870b-0e8ba166203b service nova] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Received event network-vif-deleted-2f77e83c-7f69-45c3-bbef-b3273f2f1a5e {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1020.314662] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5249fe68-d45c-3d8b-3186-de1b25fab7e5, 'name': SearchDatastore_Task, 'duration_secs': 0.017527} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.315391] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-168508c3-75c5-49cd-a711-5f4547ea8d0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.320201] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1020.320201] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cfa38a-1c83-faf6-1d65-20717bc36fdd" [ 1020.320201] env[61905]: _type = "Task" [ 1020.320201] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.327800] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cfa38a-1c83-faf6-1d65-20717bc36fdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.394858] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362908, 'name': ReconfigVM_Task, 'duration_secs': 0.671427} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.395136] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 817117bb-1728-42a1-ac2a-6ba284c65fa3/817117bb-1728-42a1-ac2a-6ba284c65fa3.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.395737] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-335752d3-b3e6-4187-b1b8-aba16606178a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.401588] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1020.401588] env[61905]: value = "task-1362910" [ 1020.401588] env[61905]: _type = "Task" [ 1020.401588] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.408447] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362910, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.564891] env[61905]: DEBUG nova.scheduler.client.report [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.830406] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52cfa38a-1c83-faf6-1d65-20717bc36fdd, 'name': SearchDatastore_Task, 'duration_secs': 0.008979} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.830639] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.830925] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.831198] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc161664-359e-4cc3-9d5d-c43c65065edb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.837463] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1020.837463] env[61905]: value = "task-1362911" [ 1020.837463] env[61905]: _type = "Task" [ 1020.837463] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.845091] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362911, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.912178] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362910, 'name': Rename_Task, 'duration_secs': 0.12246} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.912494] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.912754] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dde7cc49-8214-4de1-97a5-b16ee5c867aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.919509] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1020.919509] env[61905]: value = "task-1362912" [ 1020.919509] env[61905]: _type = "Task" [ 1020.919509] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.927190] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.070382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.091902] env[61905]: INFO nova.scheduler.client.report [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Deleted allocations for instance 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20 [ 1021.347166] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467173} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.347511] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1021.347579] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1021.347828] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9e50fbe-f84b-4bbb-b9a4-e638717acaf3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.354697] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1021.354697] env[61905]: value = "task-1362913" [ 1021.354697] env[61905]: _type = "Task" [ 1021.354697] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.362859] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.429674] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362912, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.600335] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e5568933-2fce-41bf-9a6e-bd1f663a47ab tempest-ServersTestJSON-1418293372 tempest-ServersTestJSON-1418293372-project-member] Lock "7e57b01e-3c79-4c6d-8e1a-983e2fb0df20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.730s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.865211] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055425} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.865445] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1021.866300] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dc6206-0358-4c81-80bc-d220d66296e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.889253] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1021.889494] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83ca2bcb-dfe1-4783-9930-0013b1793f2e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.908284] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1021.908284] env[61905]: value = "task-1362914" [ 1021.908284] env[61905]: _type = "Task" [ 1021.908284] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.917366] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362914, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.930060] env[61905]: DEBUG oslo_vmware.api [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362912, 'name': PowerOnVM_Task, 'duration_secs': 0.663162} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.930402] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.930712] env[61905]: INFO nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Took 7.30 seconds to spawn the instance on the hypervisor. 
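The spawn path above is a chain of asynchronous vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each driven through the same wait_for_task/_poll_task loop that emits the "Waiting for the task ... progress is N% ... completed successfully" records. A minimal Python sketch of that polling pattern, with an assumed task-info shape (state/progress/error) purely for illustration -- this is not the oslo.vmware implementation:

import time

class TaskFailedError(Exception):
    """Raised when the polled task finishes in the 'error' state."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info: zero-argument callable returning an object with
    # .state in {'queued', 'running', 'success', 'error'}, an integer
    # .progress, and an .error message. These field names are
    # assumptions of this sketch; real vSphere TaskInfo objects
    # differ in detail.
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info                       # "completed successfully"
        if info.state == 'error':
            raise TaskFailedError(info.error)
        # Corresponds to the "... progress is N%." lines in the log.
        print("progress is %d%%" % (info.progress or 0))
        time.sleep(poll_interval)

Every task id in these records (task-1362911, task-1362912, task-1362913, ...) is driven through a loop of this form, which is why each step completes before the next vCenter call is issued even though every call is asynchronous.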
[ 1021.930971] env[61905]: DEBUG nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1021.931911] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ed60e3-697d-461b-900e-de2c8e2cff12 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.976121] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Volume attach. Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1021.976407] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290104', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'name': 'volume-176d041a-4619-4a02-8a96-6f7ebfd757ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '02a40a20-1506-48f2-bbd2-db62e5dfa166', 'attached_at': '', 'detached_at': '', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'serial': '176d041a-4619-4a02-8a96-6f7ebfd757ae'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1021.977600] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525b4bca-6898-4166-82ac-006cf95ab592 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.994481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f251ac9-5afa-465e-9a14-f7f7650265db {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.019755] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] volume-176d041a-4619-4a02-8a96-6f7ebfd757ae/volume-176d041a-4619-4a02-8a96-6f7ebfd757ae.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1022.020098] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-087ae0af-97f0-4b9c-9d3d-d393cd5d6098 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.040500] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1022.040500] env[61905]: value = "task-1362915" [ 1022.040500] env[61905]: _type = "Task" [ 1022.040500] 
env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.050047] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362915, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.420775] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362914, 'name': ReconfigVM_Task, 'duration_secs': 0.272165} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.421104] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.421760] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a7bd00-a1fc-4d6c-a0da-468931cf948c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.427433] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1022.427433] env[61905]: value = "task-1362916" [ 1022.427433] env[61905]: _type = "Task" [ 1022.427433] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.440245] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362916, 'name': Rename_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.449968] env[61905]: INFO nova.compute.manager [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Took 12.03 seconds to build instance. [ 1022.550984] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362915, 'name': ReconfigVM_Task, 'duration_secs': 0.32734} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.551296] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfigured VM instance instance-0000005e to attach disk [datastore2] volume-176d041a-4619-4a02-8a96-6f7ebfd757ae/volume-176d041a-4619-4a02-8a96-6f7ebfd757ae.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.556139] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c127f856-ce84-4d34-a018-3221332bc7c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.571126] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1022.571126] env[61905]: value = "task-1362917" [ 1022.571126] env[61905]: _type = "Task" [ 1022.571126] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.579338] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362917, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.940629] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362916, 'name': Rename_Task} progress is 14%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.951458] env[61905]: DEBUG oslo_concurrency.lockutils [None req-414640d2-8201-4ad8-9d22-d9a12d325019 tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.539s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.083580] env[61905]: DEBUG oslo_vmware.api [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362917, 'name': ReconfigVM_Task, 'duration_secs': 0.125866} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.083919] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290104', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'name': 'volume-176d041a-4619-4a02-8a96-6f7ebfd757ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '02a40a20-1506-48f2-bbd2-db62e5dfa166', 'attached_at': '', 'detached_at': '', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'serial': '176d041a-4619-4a02-8a96-6f7ebfd757ae'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1023.438224] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362916, 'name': Rename_Task, 'duration_secs': 0.835692} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.438504] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.438746] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97ab6e95-6c5f-46f7-9f3b-07952cbb8020 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.444381] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1023.444381] env[61905]: value = "task-1362918" [ 1023.444381] env[61905]: _type = "Task" [ 1023.444381] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.453225] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362918, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.591702] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.591702] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.591702] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.591889] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.592135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.593967] env[61905]: INFO nova.compute.manager [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Terminating instance [ 1023.596050] env[61905]: DEBUG nova.compute.manager [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1023.596256] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1023.597082] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3678becf-3811-465a-afd4-c0afae460c8a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.607018] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.607018] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd3e2c04-1a71-43c5-aa92-44d8b89ebb1d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.612062] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1023.612062] env[61905]: value = "task-1362919" [ 1023.612062] env[61905]: _type = "Task" [ 1023.612062] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.621732] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362919, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.954886] env[61905]: DEBUG oslo_vmware.api [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362918, 'name': PowerOnVM_Task, 'duration_secs': 0.436419} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.955349] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.955677] env[61905]: INFO nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Took 7.14 seconds to spawn the instance on the hypervisor. 
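The terminate path that starts at 1023.591702 shows the named-lock protocol used throughout these records: acquire the per-instance lock, then the "<uuid>-events" lock, and log how long each was waited on and held. A stdlib-only sketch of that pattern, assuming nothing beyond what the "Acquiring lock ... acquired ... waited ... released ... held" lines state (the real implementation lives in oslo_concurrency.lockutils):

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one reusable lock per name

@contextmanager
def named_lock(name, by):
    print('Acquiring lock "%s" by "%s"' % (name, by))
    start = time.monotonic()
    _locks[name].acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs'
          % (name, by, acquired - start))
    try:
        yield
    finally:
        _locks[name].release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, by, time.monotonic() - acquired))

# e.g. the event-clearing step above, with a hypothetical body:
# with named_lock("817117bb-1728-42a1-ac2a-6ba284c65fa3-events",
#                 "_clear_events"):
#     pass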
[ 1023.955976] env[61905]: DEBUG nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1023.956954] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8261f4-3211-46a6-9dbb-85e260f236fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.122581] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362919, 'name': PowerOffVM_Task, 'duration_secs': 0.17158} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.122846] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.123027] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.123274] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce15ac83-2c0d-42a7-8af6-583fa6028b0b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.125251] env[61905]: DEBUG nova.objects.instance [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 02a40a20-1506-48f2-bbd2-db62e5dfa166 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.187216] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.187281] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.187608] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleting the datastore file [datastore2] 817117bb-1728-42a1-ac2a-6ba284c65fa3 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.187754] env[61905]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2394c426-3aa6-4bd2-9207-ab8bf5b50f12 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.193818] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for the task: (returnval){ [ 1024.193818] env[61905]: value = "task-1362921" [ 1024.193818] env[61905]: _type = "Task" [ 1024.193818] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.201646] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.475747] env[61905]: INFO nova.compute.manager [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Took 13.18 seconds to build instance. [ 1024.629846] env[61905]: DEBUG oslo_concurrency.lockutils [None req-12925a53-cd31-4088-a334-195683dc1db5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.251s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.704729] env[61905]: DEBUG oslo_vmware.api [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Task: {'id': task-1362921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243545} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.705038] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.705250] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.705673] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.705673] env[61905]: INFO nova.compute.manager [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1024.705836] env[61905]: DEBUG oslo.service.loopingcall [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.706392] env[61905]: DEBUG nova.compute.manager [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1024.706493] env[61905]: DEBUG nova.network.neutron [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1024.981126] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ce2ae0e4-4fdd-47f7-87bf-f324c055ac87 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.696s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.014021] env[61905]: DEBUG nova.compute.manager [req-fd654e19-77a5-4c1b-ab0a-753718dea184 req-051c6913-7b51-4c32-b0e4-8043a6d4382c service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Received event network-vif-deleted-7f522468-91eb-47a0-9c3f-0a774adf4dbb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.014021] env[61905]: INFO nova.compute.manager [req-fd654e19-77a5-4c1b-ab0a-753718dea184 req-051c6913-7b51-4c32-b0e4-8043a6d4382c service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Neutron deleted interface 7f522468-91eb-47a0-9c3f-0a774adf4dbb; detaching it from the instance and deleting it from the info cache [ 1025.014021] env[61905]: DEBUG nova.network.neutron [req-fd654e19-77a5-4c1b-ab0a-753718dea184 req-051c6913-7b51-4c32-b0e4-8043a6d4382c service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.486499] env[61905]: DEBUG nova.network.neutron [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.518955] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11ab9389-ea73-43fa-81e1-451e2dc99f51 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.529141] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0493c50b-13d8-4c78-a4e4-48852088374f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.553313] env[61905]: DEBUG nova.compute.manager [req-fd654e19-77a5-4c1b-ab0a-753718dea184 req-051c6913-7b51-4c32-b0e4-8043a6d4382c service nova] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Detach interface failed, port_id=7f522468-91eb-47a0-9c3f-0a774adf4dbb, reason: Instance 817117bb-1728-42a1-ac2a-6ba284c65fa3 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1025.675156] env[61905]: DEBUG nova.compute.manager [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Stashing vm_state: active {{(pid=61905) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1025.960737] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "c027ecd4-9502-4333-b0ac-315be7240d6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.961035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.988101] env[61905]: INFO nova.compute.manager [-] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Took 1.28 seconds to deallocate network for instance. [ 1026.192868] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.193165] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.464991] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1026.494081] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.697985] env[61905]: INFO nova.compute.claims [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1026.982615] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.204544] env[61905]: INFO nova.compute.resource_tracker [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating resource usage from migration b37fb18c-ee8e-4f45-af2d-1d392bf94fec [ 1027.285052] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4699718-dd3f-4ce1-aca6-bc0c66f7a1ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.292796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a196cb0-90f1-49aa-aba2-b865df935006 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.321695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7da71f6-d999-479a-aaa8-97f2f2621bf3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.328796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c50abf-dc93-49bd-af36-bcae90000cce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.342412] env[61905]: DEBUG nova.compute.provider_tree [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.845980] env[61905]: DEBUG nova.scheduler.client.report [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1028.351085] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.158s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.351376] env[61905]: INFO nova.compute.manager [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Migrating [ 1028.357998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.864s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.358239] env[61905]: DEBUG nova.objects.instance [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lazy-loading 'resources' on Instance uuid 817117bb-1728-42a1-ac2a-6ba284c65fa3 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.451263] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57d8d5c-0754-4133-8734-b79efd42efca {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.459030] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70882e8b-be2d-495d-9709-9b394700c4d8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.487403] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a638a1a-3a1e-437b-8a20-feec8f7276d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.494143] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4821b5-3fc7-45ba-8f65-ccf03db2a563 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.508088] env[61905]: DEBUG nova.compute.provider_tree [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.867957] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.868318] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.868814] env[61905]: DEBUG nova.network.neutron [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.010583] env[61905]: DEBUG nova.scheduler.client.report [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.515087] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.157s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.517225] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.534s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.518729] env[61905]: INFO nova.compute.claims [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.533760] env[61905]: INFO nova.scheduler.client.report [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Deleted allocations for instance 817117bb-1728-42a1-ac2a-6ba284c65fa3 [ 1029.562537] env[61905]: DEBUG nova.network.neutron [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": 
"tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.042087] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e2d5360b-b930-4055-838a-f2b17fbc2e1e tempest-ServerDiskConfigTestJSON-999493042 tempest-ServerDiskConfigTestJSON-999493042-project-member] Lock "817117bb-1728-42a1-ac2a-6ba284c65fa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.449s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.065574] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.621637] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0ed5ba-4151-4187-a43d-13f65a5d3f1e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.630026] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b57117-3668-4e76-b86a-b4617e8cf6fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.661796] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7150c01-77cd-47f1-a670-ac49ce4857e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.669661] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222e822d-9f28-4053-9902-a39bc380e686 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.683039] env[61905]: DEBUG nova.compute.provider_tree [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.186282] env[61905]: DEBUG nova.scheduler.client.report [None req-49961140-de56-440b-9449-0a0002cd4d2e 
tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.583018] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38326ced-7b94-4959-bd54-eb88ba526d42 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.602786] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 0 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1031.691218] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.691806] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1032.108837] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.109137] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e057acf1-8afd-464a-a7b0-ecccff72d0bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.121650] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1032.121650] env[61905]: value = "task-1362922" [ 1032.121650] env[61905]: _type = "Task" [ 1032.121650] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.130370] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362922, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.197205] env[61905]: DEBUG nova.compute.utils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1032.198774] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1032.198930] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1032.245071] env[61905]: DEBUG nova.policy [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '302950aecbc54ee0843853aac306fab2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28c767f45ae54b8fbfe2c93fc9027447', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1032.535586] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Successfully created port: 7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.632709] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362922, 'name': PowerOffVM_Task, 'duration_secs': 0.194519} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.632982] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.633195] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 17 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1032.702859] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1033.139983] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:12:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1033.140266] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1033.140427] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.140609] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1033.140821] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.140997] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1033.141218] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1033.141381] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1033.141549] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1033.141712] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1033.141884] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1033.146881] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-516ec7a1-92f6-4236-8657-7c91ce045afa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.163454] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1033.163454] env[61905]: value = "task-1362923" [ 1033.163454] env[61905]: _type = "Task" [ 1033.163454] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.171614] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362923, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.674158] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362923, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.711580] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1033.736564] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1033.736828] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1033.737111] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.737311] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1033.737463] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.737611] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1033.737815] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1033.737975] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1033.738160] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1033.738326] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1033.738500] env[61905]: DEBUG nova.virt.hardware [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1033.739457] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8978c1-c655-4e86-9995-b96d9f92d8b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.747368] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab65d974-fbfe-45c8-9b00-7093c47b554f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.900620] env[61905]: DEBUG nova.compute.manager [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Received event network-vif-plugged-7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1033.900833] env[61905]: DEBUG oslo_concurrency.lockutils [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] Acquiring lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.901050] env[61905]: DEBUG oslo_concurrency.lockutils [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.901233] env[61905]: DEBUG oslo_concurrency.lockutils [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.901404] env[61905]: 
DEBUG nova.compute.manager [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] No waiting events found dispatching network-vif-plugged-7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1033.901570] env[61905]: WARNING nova.compute.manager [req-4330df96-338d-4187-991a-b7d5abe699a6 req-764cb913-130e-4acf-b60a-95c5b4ac5919 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Received unexpected event network-vif-plugged-7d029270-bd48-4a02-9caa-ba9efa46aaee for instance with vm_state building and task_state spawning. [ 1033.986052] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Successfully updated port: 7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.175922] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362923, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.488248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.488444] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.488568] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.675364] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362923, 'name': ReconfigVM_Task, 'duration_secs': 1.158532} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.675716] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 33 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1035.020484] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1035.140803] env[61905]: DEBUG nova.network.neutron [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updating instance_info_cache with network_info: [{"id": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "address": "fa:16:3e:a2:e5:58", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d029270-bd", "ovs_interfaceid": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.183408] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1035.183708] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 
tempest-DeleteServersTestJSON-1513696777-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1035.183876] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.184137] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1035.184376] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.184584] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1035.184799] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1035.185009] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1035.185231] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1035.185509] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1035.185713] env[61905]: DEBUG nova.virt.hardware [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1035.190855] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=61905) 
detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1035.191426] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c63965ca-d748-4e11-a492-22d42fd68e0c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.210311] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1035.210311] env[61905]: value = "task-1362924" [ 1035.210311] env[61905]: _type = "Task" [ 1035.210311] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.217935] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362924, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.643342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.643681] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Instance network_info: |[{"id": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "address": "fa:16:3e:a2:e5:58", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d029270-bd", "ovs_interfaceid": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1035.644167] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:e5:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d029270-bd48-4a02-9caa-ba9efa46aaee', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.652007] env[61905]: DEBUG oslo.service.loopingcall [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.652327] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.652644] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a406ff6f-e2df-4469-9db7-13a5efba390c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.678196] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.678196] env[61905]: value = "task-1362925" [ 1035.678196] env[61905]: _type = "Task" [ 1035.678196] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.688760] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362925, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.718078] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362924, 'name': ReconfigVM_Task, 'duration_secs': 0.150428} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.718334] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1035.719065] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67777d99-14d8-487d-b3c4-a280c26aee36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.741206] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.741595] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ae8a98d-8d2c-4170-b183-0c0cc839312d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.760767] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1035.760767] env[61905]: value = "task-1362926" [ 1035.760767] env[61905]: _type = "Task" [ 1035.760767] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.768300] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362926, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.929841] env[61905]: DEBUG nova.compute.manager [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Received event network-changed-7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1035.930079] env[61905]: DEBUG nova.compute.manager [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Refreshing instance network info cache due to event network-changed-7d029270-bd48-4a02-9caa-ba9efa46aaee. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1035.930319] env[61905]: DEBUG oslo_concurrency.lockutils [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] Acquiring lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.930477] env[61905]: DEBUG oslo_concurrency.lockutils [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] Acquired lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.930635] env[61905]: DEBUG nova.network.neutron [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Refreshing network info cache for port 7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1036.187985] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362925, 'name': CreateVM_Task, 'duration_secs': 0.27649} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.188223] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.188956] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.189191] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.189556] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1036.189847] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8493809d-d430-44d4-bb4f-92a4e9f7672f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.194907] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1036.194907] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a3188-c604-a903-9dc8-7b48f7fedef9" [ 1036.194907] env[61905]: _type = "Task" [ 1036.194907] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.202894] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a3188-c604-a903-9dc8-7b48f7fedef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.269406] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362926, 'name': ReconfigVM_Task, 'duration_secs': 0.271012} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.269665] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 49305caf-e169-4c03-9968-be40567b92c7/49305caf-e169-4c03-9968-be40567b92c7.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.269937] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 50 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1036.647723] env[61905]: DEBUG nova.network.neutron [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updated VIF entry in instance network info cache for port 7d029270-bd48-4a02-9caa-ba9efa46aaee. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.648112] env[61905]: DEBUG nova.network.neutron [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updating instance_info_cache with network_info: [{"id": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "address": "fa:16:3e:a2:e5:58", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d029270-bd", "ovs_interfaceid": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.707187] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522a3188-c604-a903-9dc8-7b48f7fedef9, 'name': SearchDatastore_Task, 'duration_secs': 0.010029} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.707501] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.707741] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.707980] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.708152] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.708337] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.708585] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63a6ffc4-43c5-4814-ba66-46233416a99d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.716364] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.716535] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.717208] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-535381d1-3b1d-410c-a5b6-2dfc19fcdd63 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.723482] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1036.723482] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5209a1b1-831a-7495-db94-f2c35904965b" [ 1036.723482] env[61905]: _type = "Task" [ 1036.723482] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.730450] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5209a1b1-831a-7495-db94-f2c35904965b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.775657] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af3cc76-2a6f-41a1-9d7e-4efc6a2cfb43 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.793982] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46467d96-13e2-49a2-8372-58332064c45f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.810560] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 67 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1036.940361] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.940655] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.940913] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "7b0db0a2-c990-4160-9be8-018239425114-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.941132] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.941315] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.943935] env[61905]: INFO nova.compute.manager [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Terminating instance [ 1036.945754] env[61905]: DEBUG nova.compute.manager [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.945952] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.946799] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b9d127-cbaa-4754-bea5-d8f5a6e91ff9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.954491] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.954727] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d62d899-8094-4da7-812c-c7c6b2b3e451 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.961683] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1036.961683] env[61905]: value = "task-1362927" [ 1036.961683] env[61905]: _type = "Task" [ 1036.961683] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.968971] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362927, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.150572] env[61905]: DEBUG oslo_concurrency.lockutils [req-0af9a74c-dd32-4a20-b9d6-fbff8c3b7b5f req-b670d769-06fa-4af4-91f6-65709d043ebd service nova] Releasing lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.233944] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5209a1b1-831a-7495-db94-f2c35904965b, 'name': SearchDatastore_Task, 'duration_secs': 0.007844} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.234775] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd5a43a6-01c1-485c-88ad-e864a0358d4b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.239980] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1037.239980] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52db1ef4-d7bd-cc03-607e-8b3ef3c1b633" [ 1037.239980] env[61905]: _type = "Task" [ 1037.239980] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.247768] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52db1ef4-d7bd-cc03-607e-8b3ef3c1b633, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.349941] env[61905]: DEBUG nova.network.neutron [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Port 27393fae-ed78-4714-bde6-c887048264a3 binding to destination host cpu-1 is already ACTIVE {{(pid=61905) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1037.470682] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362927, 'name': PowerOffVM_Task, 'duration_secs': 0.182548} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.470954] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.471142] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.471398] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-406fd1d2-454a-45a6-9480-5a07f8f4600a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.529846] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.530085] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.530282] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleting the datastore file [datastore1] 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.530542] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aceb0f56-c638-474e-9f8d-8bbfa38fd2fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.536834] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1037.536834] env[61905]: value = "task-1362929" [ 1037.536834] env[61905]: _type = "Task" [ 1037.536834] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.544387] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362929, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.749970] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52db1ef4-d7bd-cc03-607e-8b3ef3c1b633, 'name': SearchDatastore_Task, 'duration_secs': 0.008421} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.750283] env[61905]: DEBUG oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.750492] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] c027ecd4-9502-4333-b0ac-315be7240d6d/c027ecd4-9502-4333-b0ac-315be7240d6d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.750741] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-606f745a-02be-48bb-b005-11d42c114474 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.756791] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1037.756791] env[61905]: value = "task-1362930" [ 1037.756791] env[61905]: _type = "Task" [ 1037.756791] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.763787] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362930, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.050772] env[61905]: DEBUG oslo_vmware.api [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142006} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.050772] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.051135] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.051168] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.051434] env[61905]: INFO nova.compute.manager [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1038.051863] env[61905]: DEBUG oslo.service.loopingcall [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.052165] env[61905]: DEBUG nova.compute.manager [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1038.052621] env[61905]: DEBUG nova.network.neutron [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1038.267087] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362930, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.371608] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.371759] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.371959] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.517341] env[61905]: DEBUG nova.compute.manager [req-5ad286ab-7bb7-4f96-b856-c7ff41c9a532 req-10146250-895d-40ae-89eb-fe404850b7bd service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Received event network-vif-deleted-55782888-9c3d-4f40-852f-9cff30eb514b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.517443] env[61905]: INFO nova.compute.manager [req-5ad286ab-7bb7-4f96-b856-c7ff41c9a532 req-10146250-895d-40ae-89eb-fe404850b7bd service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Neutron deleted interface 55782888-9c3d-4f40-852f-9cff30eb514b; detaching it from the instance and deleting it from the info cache [ 1038.517664] env[61905]: DEBUG nova.network.neutron [req-5ad286ab-7bb7-4f96-b856-c7ff41c9a532 req-10146250-895d-40ae-89eb-fe404850b7bd service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.768164] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362930, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52145} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.768448] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] c027ecd4-9502-4333-b0ac-315be7240d6d/c027ecd4-9502-4333-b0ac-315be7240d6d.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.768624] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.768869] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59e9ba69-8647-4dfc-8df8-46478dcaf813 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.774772] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1038.774772] env[61905]: value = "task-1362931" [ 1038.774772] env[61905]: _type = "Task" [ 1038.774772] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.782086] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.001573] env[61905]: DEBUG nova.network.neutron [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.020158] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e16a18c-7506-4032-aa23-24d425059d16 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.030476] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d697343e-0660-4639-bfc7-82167ad2a0c4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.058863] env[61905]: DEBUG nova.compute.manager [req-5ad286ab-7bb7-4f96-b856-c7ff41c9a532 req-10146250-895d-40ae-89eb-fe404850b7bd service nova] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Detach interface failed, port_id=55782888-9c3d-4f40-852f-9cff30eb514b, reason: Instance 7b0db0a2-c990-4160-9be8-018239425114 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1039.286965] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064114} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.287196] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.287998] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769b5180-60d4-4d2c-8bca-de10dfa0d220 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.309161] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] c027ecd4-9502-4333-b0ac-315be7240d6d/c027ecd4-9502-4333-b0ac-315be7240d6d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.309446] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d43381a-547b-446e-b9da-b908a7aafa5e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.328046] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1039.328046] env[61905]: value = "task-1362932" [ 1039.328046] env[61905]: _type = "Task" [ 1039.328046] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.335509] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362932, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.406936] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1039.406936] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.406936] env[61905]: DEBUG nova.network.neutron [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.465066] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.465066] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.465066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1039.465066] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1039.505896] env[61905]: INFO nova.compute.manager [-] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Took 1.45 seconds to deallocate network for instance. [ 1039.839385] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362932, 'name': ReconfigVM_Task, 'duration_secs': 0.27246} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.839864] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Reconfigured VM instance instance-00000067 to attach disk [datastore1] c027ecd4-9502-4333-b0ac-315be7240d6d/c027ecd4-9502-4333-b0ac-315be7240d6d.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.840794] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8430f17-ab1f-4a81-af8b-bde074892169 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.847970] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1039.847970] env[61905]: value = "task-1362933" [ 1039.847970] env[61905]: _type = "Task" [ 1039.847970] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.858013] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362933, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.969252] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Skipping network cache update for instance because it is being deleted. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1039.969438] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Skipping network cache update for instance because it is Building. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1040.007272] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.007416] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.007561] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1040.007716] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid 02a40a20-1506-48f2-bbd2-db62e5dfa166 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.012288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.012546] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.012756] env[61905]: DEBUG nova.objects.instance [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'resources' on Instance uuid 7b0db0a2-c990-4160-9be8-018239425114 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.127434] env[61905]: DEBUG nova.network.neutron [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": 
"nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.358215] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362933, 'name': Rename_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.596008] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc05ab1-a014-4af1-ba8a-b424aad60bcd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.603553] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2595d9-f75f-400d-8838-914872834264 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.638238] env[61905]: DEBUG oslo_concurrency.lockutils [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.642302] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5b6f27-5543-46f3-9d02-68426686a6f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.649916] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cfc3e8-ef35-4ed1-8d2d-41ee23e3b104 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.663067] env[61905]: DEBUG nova.compute.provider_tree [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.858852] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362933, 'name': Rename_Task} progress is 99%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.161734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf853a0-0381-4469-bd00-11e775ad68a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.165317] env[61905]: DEBUG nova.scheduler.client.report [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.184536] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb4a48c-e110-47a8-986a-21847fe8daf8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.191520] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 83 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1041.358979] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362933, 'name': Rename_Task, 'duration_secs': 1.109322} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.359279] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.359529] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c7b321c-3635-4d3a-8f7d-f4fdebd34e57 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.366217] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1041.366217] env[61905]: value = "task-1362934" [ 1041.366217] env[61905]: _type = "Task" [ 1041.366217] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.374776] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.669893] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.657s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.694541] env[61905]: INFO nova.scheduler.client.report [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleted allocations for instance 7b0db0a2-c990-4160-9be8-018239425114 [ 1041.697024] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.699424] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b5b4e86-b751-403b-8ec1-8743311f5e01 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.714245] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1041.714245] env[61905]: value = "task-1362935" [ 1041.714245] env[61905]: _type = "Task" [ 1041.714245] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.724228] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362935, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.782702] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [{"id": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "address": "fa:16:3e:bb:0a:52", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d82d8e0-26", "ovs_interfaceid": "3d82d8e0-2624-4d0b-a98b-1cfd93ccf628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.875793] env[61905]: DEBUG oslo_vmware.api [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362934, 'name': PowerOnVM_Task, 'duration_secs': 0.468464} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.876110] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.876292] env[61905]: INFO nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Took 8.16 seconds to spawn the instance on the hypervisor. 
[ 1041.876480] env[61905]: DEBUG nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1041.877249] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea9c989-033f-4ef5-b12d-bc7b09fcd08c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.204847] env[61905]: DEBUG oslo_concurrency.lockutils [None req-e0481ca9-1921-4fc5-a709-91f8cd77417d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "7b0db0a2-c990-4160-9be8-018239425114" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.264s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.225833] env[61905]: DEBUG oslo_vmware.api [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362935, 'name': PowerOnVM_Task, 'duration_secs': 0.394852} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.226215] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.226416] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-43ab2bd5-9ccf-40a8-9cba-7d184a82b319 tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance '49305caf-e169-4c03-9968-be40567b92c7' progress to 100 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1042.286061] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-02a40a20-1506-48f2-bbd2-db62e5dfa166" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.286061] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1042.286061] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286061] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286357] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] 
Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286357] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286463] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286606] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.286728] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1042.286867] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.395941] env[61905]: INFO nova.compute.manager [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Took 15.43 seconds to build instance. 
[ 1042.789139] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.789382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.789559] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.789777] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1042.791105] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05122e84-3f2b-4017-b504-98dd33e9bb56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.799715] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67847710-6b84-47cb-8da6-8ea2f2f187e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.813576] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32a1a04-11f6-4d0d-89d5-1f9009ee4fdf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.819852] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c824f89-5179-4965-974f-5a5d7291b532 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.850285] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1042.850462] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.850637] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.898379] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-49961140-de56-440b-9449-0a0002cd4d2e tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.937s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.485861] env[61905]: DEBUG nova.compute.manager [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Received event network-changed-7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1043.486233] env[61905]: DEBUG nova.compute.manager [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Refreshing instance network info cache due to event network-changed-7d029270-bd48-4a02-9caa-ba9efa46aaee. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1043.486581] env[61905]: DEBUG oslo_concurrency.lockutils [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] Acquiring lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.486830] env[61905]: DEBUG oslo_concurrency.lockutils [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] Acquired lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.487114] env[61905]: DEBUG nova.network.neutron [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Refreshing network info cache for port 7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.573949] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.574307] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.574524] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.574720] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.574897] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.577051] env[61905]: INFO nova.compute.manager [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Terminating instance [ 1043.578815] env[61905]: DEBUG nova.compute.manager [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1043.579014] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.579841] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654d6a4b-81fa-4fc5-a7b7-d697f05a2164 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.588337] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.588592] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8de91b3-a625-4d0a-98c5-b7c4d9252313 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.595117] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1043.595117] env[61905]: value = "task-1362936" [ 1043.595117] env[61905]: _type = "Task" [ 1043.595117] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.602273] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.803735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.803982] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.858263] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Applying migration context for instance 49305caf-e169-4c03-9968-be40567b92c7 as it has an incoming, in-progress migration b37fb18c-ee8e-4f45-af2d-1d392bf94fec. Migration status is finished {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1043.859019] env[61905]: INFO nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating resource usage from migration b37fb18c-ee8e-4f45-af2d-1d392bf94fec [ 1043.874142] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 02a40a20-1506-48f2-bbd2-db62e5dfa166 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.874287] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.874401] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Migration b37fb18c-ee8e-4f45-af2d-1d392bf94fec is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1043.874519] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 49305caf-e169-4c03-9968-be40567b92c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1043.874632] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c027ecd4-9502-4333-b0ac-315be7240d6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1044.106668] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362936, 'name': PowerOffVM_Task, 'duration_secs': 0.191272} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.107027] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.107115] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.107371] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adc96c21-a867-4238-b2c4-72a4c6c2b1fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.180653] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.180958] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.181204] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Deleting the datastore file [datastore2] 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.183718] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-bef27814-3ac6-471b-bd92-353f2e9ea7ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.190665] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for the task: (returnval){ [ 1044.190665] env[61905]: value = "task-1362938" [ 1044.190665] env[61905]: _type = "Task" [ 1044.190665] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.198706] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.219266] env[61905]: DEBUG nova.network.neutron [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updated VIF entry in instance network info cache for port 7d029270-bd48-4a02-9caa-ba9efa46aaee. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.219610] env[61905]: DEBUG nova.network.neutron [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updating instance_info_cache with network_info: [{"id": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "address": "fa:16:3e:a2:e5:58", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d029270-bd", "ovs_interfaceid": "7d029270-bd48-4a02-9caa-ba9efa46aaee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.272358] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.272701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a 
tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.272954] env[61905]: DEBUG nova.compute.manager [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Going to confirm migration 3 {{(pid=61905) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1044.305753] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1044.377374] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c8ad7fcb-4678-40cd-89af-e13de828579b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1044.377542] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1044.377692] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1044.469123] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268b45f-0a9d-40ab-9bb3-4ec8adca747a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.477153] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed30095-5d8d-4888-9c8c-a8e8f62c23d3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.509698] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade281c3-6828-4641-87ea-ea862cfe6c46 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.517255] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0276804-e071-4c3b-90c1-7a02e484f28e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.530430] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905)
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.702025] env[61905]: DEBUG oslo_vmware.api [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Task: {'id': task-1362938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163696} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.702025] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.702025] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.702025] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.702025] env[61905]: INFO nova.compute.manager [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1044.702360] env[61905]: DEBUG oslo.service.loopingcall [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.702477] env[61905]: DEBUG nova.compute.manager [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1044.702571] env[61905]: DEBUG nova.network.neutron [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1044.722085] env[61905]: DEBUG oslo_concurrency.lockutils [req-88224601-f1e5-4f55-ad7b-d481ce22df92 req-0f33321d-0c16-4109-9946-9f8c535bfd64 service nova] Releasing lock "refresh_cache-c027ecd4-9502-4333-b0ac-315be7240d6d" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.822800] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.822998] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquired lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.823297] env[61905]: DEBUG nova.network.neutron [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.823498] env[61905]: DEBUG nova.objects.instance [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'info_cache' on Instance uuid 49305caf-e169-4c03-9968-be40567b92c7 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.829042] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.034099] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1045.174680] env[61905]: DEBUG nova.compute.manager [req-b9a7573d-f87d-4426-9d1e-8160381bc48d 
req-be9aeece-1580-4062-809c-2996ec5fd92c service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Received event network-vif-deleted-560cc744-85c9-41e0-9cca-2eccf7ca8535 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1045.174935] env[61905]: INFO nova.compute.manager [req-b9a7573d-f87d-4426-9d1e-8160381bc48d req-be9aeece-1580-4062-809c-2996ec5fd92c service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Neutron deleted interface 560cc744-85c9-41e0-9cca-2eccf7ca8535; detaching it from the instance and deleting it from the info cache [ 1045.175108] env[61905]: DEBUG nova.network.neutron [req-b9a7573d-f87d-4426-9d1e-8160381bc48d req-be9aeece-1580-4062-809c-2996ec5fd92c service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.539028] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1045.539265] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.689s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.539555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.714s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.541112] env[61905]: INFO nova.compute.claims [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.647055] env[61905]: DEBUG nova.network.neutron [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.677924] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c94a3afc-f5af-4729-830d-f6eb80dc1d5b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.688201] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5306e893-f30c-49da-ba52-354ff7cb5d4d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.719821] env[61905]: DEBUG nova.compute.manager [req-b9a7573d-f87d-4426-9d1e-8160381bc48d req-be9aeece-1580-4062-809c-2996ec5fd92c service nova] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Detach interface failed, port_id=560cc744-85c9-41e0-9cca-2eccf7ca8535, reason: Instance 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1046.017758] env[61905]: DEBUG nova.network.neutron [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [{"id": "27393fae-ed78-4714-bde6-c887048264a3", "address": "fa:16:3e:b9:46:80", "network": {"id": "eff74ab8-6762-420e-9bab-29e98b344828", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-214692607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30edd7bc94ee492cb7f4e4f388e45b8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27393fae-ed", "ovs_interfaceid": "27393fae-ed78-4714-bde6-c887048264a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.149304] env[61905]: INFO nova.compute.manager [-] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Took 1.45 seconds to deallocate network for instance. 
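The Acquiring / acquired / "released" lock records that recur throughout this trace come from oslo.concurrency's lockutils wrappers (the {{... lockutils.py:402/407/421}} source pointers in each record). Below is a minimal sketch of that pattern, reusing the "compute_resources" lock name from the log; the function names are illustrative, not Nova's:

    from oslo_concurrency import lockutils

    # Decorator form: the body runs only while the named lock is held, and
    # the wrapper logs the 'acquired ... waited Ns' and '"released" ... held
    # Ns' debug records seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

    # Equivalent context-manager form, acquiring the same named lock.
    def instance_claim():
        with lockutils.lock('compute_resources'):
            pass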
[ 1046.520402] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Releasing lock "refresh_cache-49305caf-e169-4c03-9968-be40567b92c7" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.520697] env[61905]: DEBUG nova.objects.instance [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lazy-loading 'migration_context' on Instance uuid 49305caf-e169-4c03-9968-be40567b92c7 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.626021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dbaee0-e697-4496-a6c6-2cf5da813b01 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.632677] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fae2cbb-71b8-4cb3-9930-be7dcafef745 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.660723] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.661852] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26b2dcf-46f3-496d-8e87-873f86be6861 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.668375] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0de188d-caa0-4b95-b063-0332ebab139b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.680523] env[61905]: DEBUG nova.compute.provider_tree [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.023479] env[61905]: DEBUG nova.objects.base [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Object Instance<49305caf-e169-4c03-9968-be40567b92c7> lazy-loaded attributes: info_cache,migration_context {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1047.024415] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0d0919-7988-4c32-a6f1-6519140ee276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.044539] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff9485e-4aec-46b6-b9cf-b4cdf3ebadf5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1047.049808] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1047.049808] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d7486-1e01-8e1d-667e-46ac5a1a3327" [ 1047.049808] env[61905]: _type = "Task" [ 1047.049808] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.057315] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d7486-1e01-8e1d-667e-46ac5a1a3327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.183665] env[61905]: DEBUG nova.scheduler.client.report [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1047.560037] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525d7486-1e01-8e1d-667e-46ac5a1a3327, 'name': SearchDatastore_Task, 'duration_secs': 0.008089} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.560394] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.688382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.688884] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Start building networks asynchronously for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1047.691661] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.031s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.691880] env[61905]: DEBUG nova.objects.instance [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lazy-loading 'resources' on Instance uuid 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.195862] env[61905]: DEBUG nova.compute.utils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1048.200093] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1048.200093] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1048.238795] env[61905]: DEBUG nova.policy [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b33029a3a5374cd9ae9b795f390dc39b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0cb369144a2b44df9fbc5552ec50697a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1048.274859] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04a78d2-76b5-46c1-8d11-d4ca1228c7a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.282584] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3be3219-adea-4118-b1aa-e2c9b151f4ff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.312088] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54daaf2-8f72-45bb-a005-e9dba7eac5fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.319539] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-6a3ec6cb-3611-472b-9bd4-6b7f9edce837 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.333743] env[61905]: DEBUG nova.compute.provider_tree [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.502847] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Successfully created port: f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.701801] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1048.836944] env[61905]: DEBUG nova.scheduler.client.report [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1049.341586] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.344044] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.784s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.366782] env[61905]: INFO nova.scheduler.client.report [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Deleted allocations for instance 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa [ 1049.712555] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1049.737892] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1049.738126] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1049.738290] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.738475] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1049.738624] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.738771] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1049.739016] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1049.739206] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1049.739380] env[61905]: DEBUG nova.virt.hardware [None 
req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1049.739545] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1049.739716] env[61905]: DEBUG nova.virt.hardware [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1049.740826] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91c9321-4695-433d-bd7a-c6e180c5b9bf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.749216] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355f790f-7722-4804-9d90-7978930ff0a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.873469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-7a2075fb-105d-4224-93b7-2b792e0eebf8 tempest-ServersTestFqdnHostnames-100764059 tempest-ServersTestFqdnHostnames-100764059-project-member] Lock "15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.299s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.897516] env[61905]: DEBUG nova.compute.manager [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Received event network-vif-plugged-f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1049.897734] env[61905]: DEBUG oslo_concurrency.lockutils [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.897944] env[61905]: DEBUG oslo_concurrency.lockutils [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.898143] env[61905]: DEBUG oslo_concurrency.lockutils [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [
1049.898387] env[61905]: DEBUG nova.compute.manager [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] No waiting events found dispatching network-vif-plugged-f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1049.898467] env[61905]: WARNING nova.compute.manager [req-c757a385-540d-4292-a227-53e134456b08 req-32a143d6-7b65-4f2c-9645-addb0278ed0d service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Received unexpected event network-vif-plugged-f15cfc92-b273-4628-b203-bb8462935ba9 for instance with vm_state building and task_state spawning. [ 1049.927232] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89afba90-df72-47c7-83ba-9061c35b2ab1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.935560] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527d8310-919b-42d5-880d-91deb621df62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.967334] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71f534d-5171-4128-b93f-b4c6fcd1f051 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.975062] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a52265-d3f8-4c40-af4f-a36310024e23 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.988026] env[61905]: DEBUG nova.compute.provider_tree [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.995564] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Successfully updated port: f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1050.491222] env[61905]: DEBUG nova.scheduler.client.report [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1050.498574] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock 
"refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.498717] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.498861] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.038025] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Instance cache missing network info. {{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1051.184626] env[61905]: DEBUG nova.network.neutron [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [{"id": "f15cfc92-b273-4628-b203-bb8462935ba9", "address": "fa:16:3e:3c:cb:79", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15cfc92-b2", "ovs_interfaceid": "f15cfc92-b273-4628-b203-bb8462935ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.501187] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.157s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.688028] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" 
{{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.688028] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Instance network_info: |[{"id": "f15cfc92-b273-4628-b203-bb8462935ba9", "address": "fa:16:3e:3c:cb:79", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15cfc92-b2", "ovs_interfaceid": "f15cfc92-b273-4628-b203-bb8462935ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1051.688379] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:cb:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f54f7284-8f7d-47ee-839d-2143062cfe44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f15cfc92-b273-4628-b203-bb8462935ba9', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.696201] env[61905]: DEBUG oslo.service.loopingcall [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1051.696456] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1051.696696] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-262274f4-ec86-4bbb-b8d3-c79ae00854e8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.718963] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.718963] env[61905]: value = "task-1362939" [ 1051.718963] env[61905]: _type = "Task" [ 1051.718963] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.730967] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362939, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.017105] env[61905]: DEBUG nova.compute.manager [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Received event network-changed-f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1052.017325] env[61905]: DEBUG nova.compute.manager [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Refreshing instance network info cache due to event network-changed-f15cfc92-b273-4628-b203-bb8462935ba9. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1052.017618] env[61905]: DEBUG oslo_concurrency.lockutils [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] Acquiring lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.017721] env[61905]: DEBUG oslo_concurrency.lockutils [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] Acquired lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.017830] env[61905]: DEBUG nova.network.neutron [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Refreshing network info cache for port f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.067742] env[61905]: INFO nova.scheduler.client.report [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocation for migration b37fb18c-ee8e-4f45-af2d-1d392bf94fec [ 1052.228882] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362939, 'name': CreateVM_Task, 'duration_secs': 0.302114} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.229040] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.229735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.229903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.230246] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1052.230503] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbf353aa-f0ea-4ec1-bbb8-c93a4e889cb2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.235216] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1052.235216] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a2af28-a00d-7bb8-f23b-efae903f28a5" [ 1052.235216] env[61905]: _type = "Task" [ 1052.235216] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.242548] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a2af28-a00d-7bb8-f23b-efae903f28a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.575466] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 8.302s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.725660] env[61905]: DEBUG nova.network.neutron [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updated VIF entry in instance network info cache for port f15cfc92-b273-4628-b203-bb8462935ba9.
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.726177] env[61905]: DEBUG nova.network.neutron [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [{"id": "f15cfc92-b273-4628-b203-bb8462935ba9", "address": "fa:16:3e:3c:cb:79", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15cfc92-b2", "ovs_interfaceid": "f15cfc92-b273-4628-b203-bb8462935ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.746326] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52a2af28-a00d-7bb8-f23b-efae903f28a5, 'name': SearchDatastore_Task, 'duration_secs': 0.009679} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.746613] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.746876] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.747263] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.747263] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.747447] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.747703] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1edfd28-d801-4a6a-8cd1-9c7715009f16 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.755961] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.756400] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.757136] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51f6d82f-8ff0-4b80-ada1-f41fec5b4270 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.762369] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1052.762369] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c2fecc-4484-cd7f-190e-812468a4ae68" [ 1052.762369] env[61905]: _type = "Task" [ 1052.762369] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.769909] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c2fecc-4484-cd7f-190e-812468a4ae68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.228522] env[61905]: DEBUG oslo_concurrency.lockutils [req-e99afaac-f061-4f40-8064-e73180014d46 req-9e6c3807-898a-4e88-b7a7-48b0db2e2369 service nova] Releasing lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.273574] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c2fecc-4484-cd7f-190e-812468a4ae68, 'name': SearchDatastore_Task, 'duration_secs': 0.008511} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.274398] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41625804-bd19-42b6-9bbc-35f8dc745aa5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.279615] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1053.279615] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203252f-9617-12c4-cf26-56215c691097" [ 1053.279615] env[61905]: _type = "Task" [ 1053.279615] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.296326] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203252f-9617-12c4-cf26-56215c691097, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.790783] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5203252f-9617-12c4-cf26-56215c691097, 'name': SearchDatastore_Task, 'duration_secs': 0.010534} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.791107] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.791393] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b/c8ad7fcb-4678-40cd-89af-e13de828579b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.792119] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0450a1bf-01c5-43a8-8f9f-34a4f05f17c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.798103] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1053.798103] env[61905]: value = "task-1362940" [ 1053.798103] env[61905]: _type = "Task" [ 1053.798103] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.806553] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362940, 'name': CopyVirtualDisk_Task} progress is 0%. 
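The disk copy is the standard oslo.vmware invoke-then-wait pair: invoke_api returns a Task reference and wait_for_task drives the _poll_task loop seen in the log (progress 0% -> 77% -> completed successfully). A sketch, reusing `session` from the MakeDirectory sketch above and omitting the optional datacenter arguments:

```python
disk_mgr = session.vim.service_content.virtualDiskManager

task = session.invoke_api(
    session.vim, "CopyVirtualDisk_Task", disk_mgr,
    sourceName="[datastore1] devstack-image-cache_base/"
               "4d166298-c700-4bc6-8f8f-67684a277053/"
               "4d166298-c700-4bc6-8f8f-67684a277053.vmdk",
    destName="[datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b/"
             "c8ad7fcb-4678-40cd-89af-e13de828579b.vmdk")

# Blocks, polling the task state until SUCCESS, or raises on task error.
session.wait_for_task(task)
```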
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.811962] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.812242] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.812454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "49305caf-e169-4c03-9968-be40567b92c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.812646] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.812824] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.816029] env[61905]: INFO nova.compute.manager [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Terminating instance [ 1053.816897] env[61905]: DEBUG nova.compute.manager [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Start destroying the instance on the hypervisor. 
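The terminate path above serializes on a per-instance lock ("waited 0.000s" means it was uncontended). A sketch of the same oslo.concurrency pattern, with the instance UUID as the lock name and a placeholder body:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("49305caf-e169-4c03-9968-be40567b92c7")
def do_terminate_instance():
    # placeholder body: power off, unregister, delete datastore files
    pass

# Concurrent callers using the same lock name queue here one at a time.
do_terminate_instance()
```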
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1053.817103] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1053.818810] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bac3e5-d8b8-49c3-a0ea-795d196fa2fe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.824454] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1053.824675] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32116590-b109-4bec-96b8-c6d19fbd408c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.832031] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1053.832031] env[61905]: value = "task-1362941" [ 1053.832031] env[61905]: _type = "Task" [ 1053.832031] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.839487] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362941, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.309132] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362940, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.341480] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362941, 'name': PowerOffVM_Task, 'duration_secs': 0.257894} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.341783] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1054.341964] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1054.342268] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e15e040-1ae6-4c44-8bd1-98a2e15ff276 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.418563] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.418961] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.419191] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleting the datastore file [datastore2] 49305caf-e169-4c03-9968-be40567b92c7 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.419481] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a577ba43-e2d6-4b50-a745-722b95c39a53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.428361] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for the task: (returnval){ [ 1054.428361] env[61905]: value = "task-1362943" [ 1054.428361] env[61905]: _type = "Task" [ 1054.428361] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.436825] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362943, 'name': DeleteDatastoreFile_Task} progress is 0%. 
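The destroy sequence above is three vSphere calls: a PowerOffVM task, a synchronous UnregisterVM (which leaves the files behind, hence the explicit delete that follows), and a DeleteDatastoreFile task. A sketch reusing `session` from the earlier sketch; `vm_ref` and `dc_ref` are placeholder managed-object references:

```python
vm_ref = dc_ref = None  # placeholders for the VM and Datacenter MoRefs

task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
session.wait_for_task(task)

# Removes the VM from vCenter inventory but keeps its datastore files.
session.invoke_api(session.vim, "UnregisterVM", vm_ref)

file_mgr = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, "DeleteDatastoreFile_Task", file_mgr,
    name="[datastore2] 49305caf-e169-4c03-9968-be40567b92c7",
    datacenter=dc_ref)
session.wait_for_task(task)
```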
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.809604] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531468} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.809805] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b/c8ad7fcb-4678-40cd-89af-e13de828579b.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.810038] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.810298] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3637edc9-ac4f-4ac0-9a17-32adbc30906b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.817344] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1054.817344] env[61905]: value = "task-1362944" [ 1054.817344] env[61905]: _type = "Task" [ 1054.817344] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.824920] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.939702] env[61905]: DEBUG oslo_vmware.api [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Task: {'id': task-1362943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197146} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.939702] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.939702] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.939875] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.939981] env[61905]: INFO nova.compute.manager [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1054.940267] env[61905]: DEBUG oslo.service.loopingcall [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
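The "Waiting for function ... to return" record is oslo.service's looping-call machinery retrying the network deallocation. The exact looping-call variant Nova uses here is not visible in the log; a minimal sketch of the general pattern with FixedIntervalLoopingCall, where raising LoopingCallDone ends the loop and delivers a return value:

```python
from oslo_service import loopingcall

attempts = {"count": 0}

def _deallocate_with_retries():
    # hypothetical stand-in for _deallocate_network_with_retries
    attempts["count"] += 1
    if attempts["count"] >= 3:  # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
print(timer.start(interval=0.1).wait())  # True, after three invocations
```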
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1054.940494] env[61905]: DEBUG nova.compute.manager [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1054.940600] env[61905]: DEBUG nova.network.neutron [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1055.199621] env[61905]: DEBUG nova.compute.manager [req-ba12cf3b-ed8a-41f2-b5d3-f520f3d53ce4 req-e7fe794e-d226-4a3a-a422-99ba858caeba service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Received event network-vif-deleted-27393fae-ed78-4714-bde6-c887048264a3 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1055.199621] env[61905]: INFO nova.compute.manager [req-ba12cf3b-ed8a-41f2-b5d3-f520f3d53ce4 req-e7fe794e-d226-4a3a-a422-99ba858caeba service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Neutron deleted interface 27393fae-ed78-4714-bde6-c887048264a3; detaching it from the instance and deleting it from the info cache [ 1055.199872] env[61905]: DEBUG nova.network.neutron [req-ba12cf3b-ed8a-41f2-b5d3-f520f3d53ce4 req-e7fe794e-d226-4a3a-a422-99ba858caeba service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.327334] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14706} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.327657] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.328385] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587e58ce-cf0d-4294-a495-830d2cea8460 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.350983] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b/c8ad7fcb-4678-40cd-89af-e13de828579b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.351351] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b5a816e-b28b-49bf-a2b8-127e152a3f94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.371281] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1055.371281] env[61905]: value = "task-1362945" [ 1055.371281] env[61905]: _type = "Task" [ 1055.371281] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.379129] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362945, 'name': ReconfigVM_Task} progress is 5%. 
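ReconfigVM_Task is how the copied VMDK gets attached: a VirtualMachineConfigSpec is built through the SOAP client's type factory and submitted as a single task. A sketch with the device details elided; `session` and `vm_ref` as in the earlier sketches:

```python
# Build a (here empty) reconfigure spec via the suds type factory.
factory = session.vim.client.factory
config_spec = factory.create("ns0:VirtualMachineConfigSpec")
config_spec.deviceChange = []  # a real attach appends a VirtualDeviceConfigSpec
                               # wrapping a VirtualDisk backed by the new VMDK

task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref,
                          spec=config_spec)
session.wait_for_task(task)
```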
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.641022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.642731] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.670347] env[61905]: DEBUG nova.network.neutron [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.702545] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ff91547-d4a0-4537-9372-f0103a11bdf1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.711623] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eaf122-5146-4740-a54d-7c4a8b2cabf9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.736078] env[61905]: DEBUG nova.compute.manager [req-ba12cf3b-ed8a-41f2-b5d3-f520f3d53ce4 req-e7fe794e-d226-4a3a-a422-99ba858caeba service nova] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Detach interface failed, port_id=27393fae-ed78-4714-bde6-c887048264a3, reason: Instance 49305caf-e169-4c03-9968-be40567b92c7 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1055.881445] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362945, 'name': ReconfigVM_Task, 'duration_secs': 0.313878} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.881658] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfigured VM instance instance-00000068 to attach disk [datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b/c8ad7fcb-4678-40cd-89af-e13de828579b.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.882367] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad603af0-bb89-48a4-ac7e-9a13a7c92902 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.888531] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1055.888531] env[61905]: value = "task-1362946" [ 1055.888531] env[61905]: _type = "Task" [ 1055.888531] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.896416] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362946, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.144330] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1056.172433] env[61905]: INFO nova.compute.manager [-] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Took 1.23 seconds to deallocate network for instance. [ 1056.398583] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362946, 'name': Rename_Task, 'duration_secs': 0.1449} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.398942] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.399107] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de57089a-40a2-4a5b-996a-5cf10b15c200 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.405691] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1056.405691] env[61905]: value = "task-1362947" [ 1056.405691] env[61905]: _type = "Task" [ 1056.405691] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.412631] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.667421] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.667706] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.669356] env[61905]: INFO nova.compute.claims [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.679752] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.917668] env[61905]: DEBUG oslo_vmware.api [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362947, 'name': PowerOnVM_Task, 'duration_secs': 0.443803} completed successfully. 
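Power-on is the same task pattern again, and the "Checking state" that follows it is a property read, which oslo.vmware exposes as vim_util.get_object_property. A sketch, continuing with the placeholder `session` and `vm_ref`:

```python
from oslo_vmware import vim_util

task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)

# The RetrievePropertiesEx round-trip behind "Checking state".
state = session.invoke_api(vim_util, "get_object_property",
                           session.vim, vm_ref, "runtime.powerState")
print(state)  # e.g. 'poweredOn'
```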
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.917668] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.917668] env[61905]: INFO nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Took 7.20 seconds to spawn the instance on the hypervisor. [ 1056.917668] env[61905]: DEBUG nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1056.917668] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293ac4a3-16ba-4866-9e0a-31c92416c422 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.436090] env[61905]: INFO nova.compute.manager [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Took 12.63 seconds to build instance. [ 1057.750892] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99d7115-a275-46af-bbbe-96c8034a9c6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.758776] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b590e1-8e8a-47f3-a25d-d6d6ee51a60e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.788781] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38bfff7-1a1b-4dab-906d-dbf0752097bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.796363] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27af1554-8b03-4d71-adb5-edeb3f7258b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.809976] env[61905]: DEBUG nova.compute.provider_tree [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.938424] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61beb326-e47c-49e3-ae7d-48e4fed3377d tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.134s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1058.189950] env[61905]: DEBUG nova.compute.manager [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Received event network-changed-f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1058.190297] env[61905]: DEBUG nova.compute.manager [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Refreshing instance network info cache due to event network-changed-f15cfc92-b273-4628-b203-bb8462935ba9. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1058.190638] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] Acquiring lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.190879] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] Acquired lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.191265] env[61905]: DEBUG nova.network.neutron [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Refreshing network info cache for port f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.313057] env[61905]: DEBUG nova.scheduler.client.report [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1058.817794] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.150s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.818319] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Start building networks asynchronously for instance.
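Placement turns each inventory record above into schedulable capacity as (total - reserved) * allocation_ratio, which is why this node can accept far more than 48 vCPUs' worth of flavors. Applied to the reported inventory:

```python
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```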
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1058.821440] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.141s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.821730] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.841303] env[61905]: INFO nova.scheduler.client.report [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Deleted allocations for instance 49305caf-e169-4c03-9968-be40567b92c7 [ 1058.903135] env[61905]: DEBUG nova.network.neutron [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updated VIF entry in instance network info cache for port f15cfc92-b273-4628-b203-bb8462935ba9. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1058.903509] env[61905]: DEBUG nova.network.neutron [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [{"id": "f15cfc92-b273-4628-b203-bb8462935ba9", "address": "fa:16:3e:3c:cb:79", "network": {"id": "e282db66-19d0-4c6c-a2c8-154b6cadead7", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1218884398-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0cb369144a2b44df9fbc5552ec50697a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f54f7284-8f7d-47ee-839d-2143062cfe44", "external-id": "nsx-vlan-transportzone-569", "segmentation_id": 569, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf15cfc92-b2", "ovs_interfaceid": "f15cfc92-b273-4628-b203-bb8462935ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.326473] env[61905]: DEBUG nova.compute.utils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1059.328439] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}}
[ 1059.328770] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1059.350070] env[61905]: DEBUG oslo_concurrency.lockutils [None req-61347451-f471-41cf-9e52-12a069f1b28a tempest-DeleteServersTestJSON-1513696777 tempest-DeleteServersTestJSON-1513696777-project-member] Lock "49305caf-e169-4c03-9968-be40567b92c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.537s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.370529] env[61905]: DEBUG nova.policy [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b126f47b9df4f4586f377f70faada62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edbab61ee8984d0c91eab473eba0047c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1059.406123] env[61905]: DEBUG oslo_concurrency.lockutils [req-e5a90f2a-1c4c-4e10-aa9a-056b9940ee16 req-311e7688-9824-4c54-abdf-f8f4454666c7 service nova] Releasing lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.631777] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Successfully created port: a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1059.832201] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1060.842809] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Start spawning the instance on the hypervisor.
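The "Policy check ... failed" record above is oslo.policy evaluating network:attach_external_network against the request credentials shown. A sketch of that evaluation; the rule name matches the log, while the "is_admin:True" default check string is an assumption for illustration:

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    "network:attach_external_network", "is_admin:True"))  # assumed default

creds = {"is_admin": False, "roles": ["reader", "member"],
         "project_id": "edbab61ee8984d0c91eab473eba0047c"}

# False for these non-admin credentials, matching the DEBUG line above.
print(enforcer.authorize("network:attach_external_network", {}, creds,
                         do_raise=False))
```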
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1060.869041] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1060.869446] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1060.869711] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1060.870027] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1060.870297] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1060.870530] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1060.870766] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1060.870927] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1060.871145] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1060.871333] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1060.871541] env[61905]: DEBUG nova.virt.hardware [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1060.872448] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1116ec81-2223-4acc-ab0c-9f4d52e178fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.880837] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfff69a-7136-4971-8a25-a90a87252b11 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.016299] env[61905]: DEBUG nova.compute.manager [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received event network-vif-plugged-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1061.016534] env[61905]: DEBUG oslo_concurrency.lockutils [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] Acquiring lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.016746] env[61905]: DEBUG oslo_concurrency.lockutils [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.016967] env[61905]: DEBUG oslo_concurrency.lockutils [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.017153] env[61905]: DEBUG nova.compute.manager [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] No waiting events found dispatching network-vif-plugged-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1061.017358] env[61905]: WARNING nova.compute.manager [req-414ba127-fafe-432e-8b96-452a7958ceb3 req-89e4176f-1410-448f-a955-056b3d2f0b03 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received unexpected event network-vif-plugged-a105c9d5-8ba9-40c5-ba4c-a35528f5779b for instance with vm_state building and task_state spawning.
[ 1061.087579] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Successfully updated port: a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1061.591278] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.591489] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.591675] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.311818] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Instance cache missing network info.
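The CPU-topology trace a few records back (Flavor/Image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", one possible topology) reduces to enumerating every (sockets, cores, threads) factorization of the vCPU count within the limits. A brute-force sketch of that computation, not Nova's actual implementation:

```python
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) whose product is the vCPU count
    # and which stays within the per-dimension limits.
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if (s * c * t == vcpus and s <= max_sockets
                and c <= max_cores and t <= max_threads):
            yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- the one topology logged
```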
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1062.438323] env[61905]: DEBUG nova.network.neutron [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.941346] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.941642] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Instance network_info: |[{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1062.942105] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:49:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a105c9d5-8ba9-40c5-ba4c-a35528f5779b', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.949392] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating folder: Project (edbab61ee8984d0c91eab473eba0047c). Parent ref: group-v289968. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.949994] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68414736-42bb-4d04-8fbd-2a0b2d6f3d53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.960746] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created folder: Project (edbab61ee8984d0c91eab473eba0047c) in parent group-v289968. [ 1062.960929] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating folder: Instances. Parent ref: group-v290108. {{(pid=61905) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1062.961200] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf17f4e3-fbfb-4ede-9989-2d48597d6a25 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.970503] env[61905]: INFO nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created folder: Instances in parent group-v290108. [ 1062.970740] env[61905]: DEBUG oslo.service.loopingcall [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.970928] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.971170] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc7db67d-0d2e-4cb0-8a95-a64b43379bfa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.991286] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.991286] env[61905]: value = "task-1362951" [ 1062.991286] env[61905]: _type = "Task" [ 1062.991286] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.001699] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362951, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.045809] env[61905]: DEBUG nova.compute.manager [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1063.046084] env[61905]: DEBUG nova.compute.manager [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing instance network info cache due to event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1063.046316] env[61905]: DEBUG oslo_concurrency.lockutils [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.046466] env[61905]: DEBUG oslo_concurrency.lockutils [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.046633] env[61905]: DEBUG nova.network.neutron [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.404812] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.405186] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.405225] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.405428] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1063.405641] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.405797] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Cleaning up deleted instances with incomplete migration {{(pid=61905) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1063.502602] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362951, 'name': CreateVM_Task, 'duration_secs': 0.298159} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.502839] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.503805] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.504037] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.504484] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.504813] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd6a30e-bc17-4568-82f3-2326e0112ba6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.510393] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1063.510393] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52153aa5-7b00-10c8-2798-6473ae7b4888" [ 1063.510393] env[61905]: _type = "Task" [ 1063.510393] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.520790] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52153aa5-7b00-10c8-2798-6473ae7b4888, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.755085] env[61905]: DEBUG nova.network.neutron [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updated VIF entry in instance network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.755489] env[61905]: DEBUG nova.network.neutron [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.024037] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52153aa5-7b00-10c8-2798-6473ae7b4888, 'name': SearchDatastore_Task, 'duration_secs': 0.009394} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.024911] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.024911] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1064.025240] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.025274] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.025506] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.026155] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4daff851-145c-4f3c-8eb3-8534df8dd528 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.035525] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.035841] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1064.036857] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4604fe91-a478-46db-b1eb-cd6585c4458d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.043610] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1064.043610] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e5864-a30d-6ac8-d8d6-2e9bd1d477a1" [ 1064.043610] env[61905]: _type = "Task" [ 1064.043610] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.054347] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]526e5864-a30d-6ac8-d8d6-2e9bd1d477a1, 'name': SearchDatastore_Task, 'duration_secs': 0.008201} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.055188] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-736dfa6a-a50a-4316-969b-96b9d8648a7b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.060558] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1064.060558] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52274276-38ab-7971-24e5-afc822fa6073" [ 1064.060558] env[61905]: _type = "Task" [ 1064.060558] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.070692] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52274276-38ab-7971-24e5-afc822fa6073, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.258621] env[61905]: DEBUG oslo_concurrency.lockutils [req-96d833bd-7c2e-4280-abc0-29632c819035 req-71bfc15d-d43b-41fc-ba7b-abbe7279d094 service nova] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.572673] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52274276-38ab-7971-24e5-afc822fa6073, 'name': SearchDatastore_Task, 'duration_secs': 0.014118} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.572673] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.573012] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] bcca8c7b-3e80-4895-ac56-d5aa05d482e5/bcca8c7b-3e80-4895-ac56-d5aa05d482e5.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.573111] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-187d56ab-dcab-46ce-a548-4df5355f49b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.581093] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1064.581093] env[61905]: value = "task-1362952" [ 1064.581093] env[61905]: _type = "Task" [ 1064.581093] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.588961] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.908797] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.089349] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465749} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.089625] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] bcca8c7b-3e80-4895-ac56-d5aa05d482e5/bcca8c7b-3e80-4895-ac56-d5aa05d482e5.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.089837] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.090135] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1acc868f-5f52-45ca-9c65-b2f02a82a485 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.099920] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1065.099920] env[61905]: value = "task-1362953" [ 1065.099920] env[61905]: _type = "Task" [ 1065.099920] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.109792] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362953, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.404238] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.611157] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06357} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.611490] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.612219] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d042adba-519d-427e-8545-a1dbd058f3b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.633761] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] bcca8c7b-3e80-4895-ac56-d5aa05d482e5/bcca8c7b-3e80-4895-ac56-d5aa05d482e5.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.634027] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4405ddf-d3ec-42bc-b9eb-c1f3f57b801f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.653407] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1065.653407] env[61905]: value = "task-1362954" [ 1065.653407] env[61905]: _type = "Task" [ 1065.653407] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.660639] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.163313] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362954, 'name': ReconfigVM_Task, 'duration_secs': 0.288246} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.163633] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Reconfigured VM instance instance-00000069 to attach disk [datastore2] bcca8c7b-3e80-4895-ac56-d5aa05d482e5/bcca8c7b-3e80-4895-ac56-d5aa05d482e5.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.164320] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-234013e6-d757-4231-a984-19caf4c47219 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.170615] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1066.170615] env[61905]: value = "task-1362955" [ 1066.170615] env[61905]: _type = "Task" [ 1066.170615] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.177947] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362955, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.679886] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362955, 'name': Rename_Task, 'duration_secs': 0.163312} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.680278] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.680436] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2df360e-9413-43d0-9cf2-39b28e37201a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.686346] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1066.686346] env[61905]: value = "task-1362956" [ 1066.686346] env[61905]: _type = "Task" [ 1066.686346] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.693244] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362956, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.196727] env[61905]: DEBUG oslo_vmware.api [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362956, 'name': PowerOnVM_Task, 'duration_secs': 0.448816} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.196996] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.197226] env[61905]: INFO nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Took 6.35 seconds to spawn the instance on the hypervisor. [ 1067.197409] env[61905]: DEBUG nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1067.198169] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bb915a-498c-454b-965e-f35a13afc3bf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.400620] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.400782] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.715899] env[61905]: INFO nova.compute.manager [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Took 11.07 seconds to build instance. 
[ 1067.906039] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1067.906039] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1068.218284] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ef60f4e6-33bc-4a3f-939a-6e55df89efe2 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.577s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.730163] env[61905]: DEBUG nova.compute.manager [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1068.730402] env[61905]: DEBUG nova.compute.manager [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing instance network info cache due to event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1068.730619] env[61905]: DEBUG oslo_concurrency.lockutils [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1068.730794] env[61905]: DEBUG oslo_concurrency.lockutils [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.731135] env[61905]: DEBUG nova.network.neutron [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.913172] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1068.913414] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.404336] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1069.434017] env[61905]: DEBUG nova.network.neutron [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updated VIF entry in instance network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.434400] env[61905]: DEBUG nova.network.neutron [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.907607] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.908150] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.908301] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.908519] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1069.909551] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ce6da5-c3a1-4dfd-a730-ce5c9bdda87a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.917705] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b98e35-7010-4c32-887d-5e6216ff8cbe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.931843] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457fe4f3-b3d7-4c8f-a81d-7f82d6439971 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.938015] env[61905]: DEBUG oslo_concurrency.lockutils [req-33ad6fb9-651d-4ac5-905a-62ff4c05e474 req-67c09da0-5628-45fe-819c-df31eaa9f357 service nova] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.939264] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6fac0e-c654-4ea7-ae18-de0e64e37315 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.970628] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1069.970782] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.970901] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.123709] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 02a40a20-1506-48f2-bbd2-db62e5dfa166 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.123982] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c027ecd4-9502-4333-b0ac-315be7240d6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.123982] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance c8ad7fcb-4678-40cd-89af-e13de828579b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.124171] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.124371] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1071.124512] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1071.178085] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cd77ba-8908-4662-87cb-80f3b3ba05e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.185385] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89c3e0d-1042-48f4-b28c-20627d40df8f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.215833] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffde3d25-9681-4bd0-8ac1-34f60b1c3621 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.223323] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9bd3d7-a385-4a7c-b59a-2bf97e178306 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.236333] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.740032] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 
9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1072.244701] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1072.244940] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.274s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.245146] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.245273] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Cleaning up deleted instances {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1072.757994] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] There are 43 instances to clean {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1072.758262] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 49305caf-e169-4c03-9968-be40567b92c7] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.261452] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 817117bb-1728-42a1-ac2a-6ba284c65fa3] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.765483] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7e57b01e-3c79-4c6d-8e1a-983e2fb0df20] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.268477] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 15c966fe-6cb3-40ba-a6ac-8c8fe12ce3fa] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.772488] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: fb417a53-b6df-4566-87f2-bd56dafd789c] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.275847] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 94df3cde-9330-41a1-bbec-1ce2a76551d6] Instance has had 0 of 5 cleanup attempts {{(pid=61905) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.779452] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c79ae168-cf98-4b0a-a55d-a39d66f82462] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.282868] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 020dc38a-d4ea-41fa-a3aa-3eb63b3516d9] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.787055] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e810c9bb-ffb1-47f2-bc23-375520a2f50d] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.290840] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 30862de5-1cfa-494a-a81d-1215a3580339] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.794156] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 26375621-b272-4243-95bd-5cf5b946cec4] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.297019] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7bb17b60-268a-4670-beb8-df5232a698ae] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.800525] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e50cac29-797e-44a2-aafc-868e45ffd9cc] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.304503] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 4eba4203-0e35-4c56-b24f-3ac47a7a8b83] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.807460] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 55a9190b-52f7-4bba-81b0-079e62537183] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.311180] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 63eb2219-fea2-4af0-90d2-e8d9ac53a138] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.814559] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: b9400202-eb37-4c75-bbf3-807edb7bc16f] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.982680] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "c027ecd4-9502-4333-b0ac-315be7240d6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.982993] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.983248] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.983455] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.983638] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.986789] env[61905]: INFO nova.compute.manager [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Terminating instance [ 1080.991837] env[61905]: DEBUG nova.compute.manager [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1080.992087] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.992956] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79538e4-504c-4298-9f43-676ea609f67d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.000734] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.000985] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16a2926c-e466-4f53-a036-47f4522cdbc8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.008691] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1081.008691] env[61905]: value = "task-1362957" [ 1081.008691] env[61905]: _type = "Task" [ 1081.008691] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.015958] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.318638] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 22b6d87c-08c5-492c-a963-f7ad6ef5db5b] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.519056] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362957, 'name': PowerOffVM_Task, 'duration_secs': 0.175095} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.519323] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1081.519502] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.519753] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b83de277-a01d-4906-ae04-0d1ef40b79e4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.582294] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1081.582506] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1081.582669] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleting the datastore file [datastore1] c027ecd4-9502-4333-b0ac-315be7240d6d {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1081.582943] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94227f7a-d737-4a86-8881-b3ac98d78057 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.590315] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1081.590315] env[61905]: value = "task-1362959" [ 1081.590315] env[61905]: _type = "Task" [ 1081.590315] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.597859] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.822390] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 7b0db0a2-c990-4160-9be8-018239425114] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.100495] env[61905]: DEBUG oslo_vmware.api [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13371} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.100779] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.101015] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.101289] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.101493] env[61905]: INFO nova.compute.manager [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1082.101763] env[61905]: DEBUG oslo.service.loopingcall [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
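The "Waiting for function ... _deallocate_network_with_retries to return" entry is oslo.service's looping-call machinery retrying network deallocation. Nova's exact variant is not visible here; a FixedIntervalLoopingCall stand-in with a dummy payload shows the shape:

    from oslo_service import loopingcall

    state = {'attempts': 0}

    def _deallocate_network_with_retries():
        state['attempts'] += 1
        if state['attempts'] < 3:
            return  # pretend it failed; run again after the interval
        raise loopingcall.LoopingCallDone(True)  # stop, hand back a value

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone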
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1082.101958] env[61905]: DEBUG nova.compute.manager [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1082.102066] env[61905]: DEBUG nova.network.neutron [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1082.325792] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: ebf7849c-716f-4b4c-bb9c-42c090d0b3c0] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.530640] env[61905]: DEBUG nova.compute.manager [req-7e2f1f71-e392-4581-9407-da4fcfb2b544 req-ce05e231-07a8-4e9f-95b8-853d7bf90029 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Received event network-vif-deleted-7d029270-bd48-4a02-9caa-ba9efa46aaee {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1082.530844] env[61905]: INFO nova.compute.manager [req-7e2f1f71-e392-4581-9407-da4fcfb2b544 req-ce05e231-07a8-4e9f-95b8-853d7bf90029 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Neutron deleted interface 7d029270-bd48-4a02-9caa-ba9efa46aaee; detaching it from the instance and deleting it from the info cache [ 1082.531032] env[61905]: DEBUG nova.network.neutron [req-7e2f1f71-e392-4581-9407-da4fcfb2b544 req-ce05e231-07a8-4e9f-95b8-853d7bf90029 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.828909] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 27c3ed56-d24e-47d1-9c39-43b3b88a59b9] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.009935] env[61905]: DEBUG nova.network.neutron [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.034417] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4c979752-121c-40ad-9326-fd981bc2e37f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.047943] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9356bbb-fbb8-4911-b814-0be4f369aa87 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.079311] env[61905]: DEBUG nova.compute.manager [req-7e2f1f71-e392-4581-9407-da4fcfb2b544 req-ce05e231-07a8-4e9f-95b8-853d7bf90029 service nova] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Detach interface failed, port_id=7d029270-bd48-4a02-9caa-ba9efa46aaee, reason: Instance c027ecd4-9502-4333-b0ac-315be7240d6d could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1083.332041] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e9e00459-e685-431b-b194-cf426c7a743e] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.512418] env[61905]: INFO nova.compute.manager [-] [instance: c027ecd4-9502-4333-b0ac-315be7240d6d] Took 1.41 seconds to deallocate network for instance. [ 1083.836642] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 0f7ccb34-cb14-4b21-ae61-b066427d400e] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.018984] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.019284] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.019513] env[61905]: DEBUG nova.objects.instance [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'resources' on Instance uuid c027ecd4-9502-4333-b0ac-315be7240d6d {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.340143] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 299479fb-9a94-40b8-928d-8e491dbe1af1] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.582272] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cd56b5-4aef-47e5-92b3-9d60e7d8e1fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.590195] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94e6f83-70e7-4733-b669-e558a0f3a508 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.619797] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f59631-33ed-4213-8c12-2fa087b7a465 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.626700] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe25d739-3af9-4649-b207-f3054a52f0e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.639150] env[61905]: DEBUG nova.compute.provider_tree [None req-64466b60-f916-4260-99c6-eceba3015805 
tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.843609] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 1502df44-9166-4ce8-9117-a57e7be2d299] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.142674] env[61905]: DEBUG nova.scheduler.client.report [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1085.347117] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 2c919b69-0e09-431d-8a75-98d5740c7dab] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.647385] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.666611] env[61905]: INFO nova.scheduler.client.report [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted allocations for instance c027ecd4-9502-4333-b0ac-315be7240d6d [ 1085.850159] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 38b80675-182a-422c-9222-aa78ed59c351] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.174080] env[61905]: DEBUG oslo_concurrency.lockutils [None req-64466b60-f916-4260-99c6-eceba3015805 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "c027ecd4-9502-4333-b0ac-315be7240d6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.191s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.354301] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e3b11ed6-b703-43a6-a528-28520ed43233] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.857713] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 020f97b7-e3e4-44e1-9ad2-97e3ed671f7e] Instance has had 0 of 5 cleanup attempts 
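The usable capacity placement derives from the inventory reported above is (total - reserved) * allocation_ratio per resource class, i.e. 192 VCPU, 196078 MB of RAM and 400 GB of disk for this node:

    # Values copied from the inventory entry above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0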
{{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.909704] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.909969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.361495] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 4bb7a2df-b472-4f6d-8a01-a55d0b86efda] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1087.412430] env[61905]: INFO nova.compute.manager [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Detaching volume 176d041a-4619-4a02-8a96-6f7ebfd757ae [ 1087.443623] env[61905]: INFO nova.virt.block_device [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Attempting to driver detach volume 176d041a-4619-4a02-8a96-6f7ebfd757ae from mountpoint /dev/sdb [ 1087.443912] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Volume detach.
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1087.444120] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290104', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'name': 'volume-176d041a-4619-4a02-8a96-6f7ebfd757ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '02a40a20-1506-48f2-bbd2-db62e5dfa166', 'attached_at': '', 'detached_at': '', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'serial': '176d041a-4619-4a02-8a96-6f7ebfd757ae'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1087.444955] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84164a47-661c-4316-9120-72b1d05f6899 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.466390] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb2eaac-4703-4259-a0f9-ee32d6300f27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.472989] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8430abd-08ce-4c4b-ace6-58c4c31b04d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.493502] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fefd94-d42d-4df8-9885-bce029666217 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.507624] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] The volume has not been displaced from its original location: [datastore2] volume-176d041a-4619-4a02-8a96-6f7ebfd757ae/volume-176d041a-4619-4a02-8a96-6f7ebfd757ae.vmdk. No consolidation needed. 
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1087.512708] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1087.512965] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f062aae-cb4d-4c10-8f05-a1ce8a071797 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.530361] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1087.530361] env[61905]: value = "task-1362960" [ 1087.530361] env[61905]: _type = "Task" [ 1087.530361] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.537735] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.864685] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 9a385d72-ba5d-48e0-b71f-d37d4e63c403] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.039571] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362960, 'name': ReconfigVM_Task, 'duration_secs': 0.235031} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.039857] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1088.044386] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fe41215-76b9-42fb-97e8-cb2f56fa9246 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.059505] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1088.059505] env[61905]: value = "task-1362961" [ 1088.059505] env[61905]: _type = "Task" [ 1088.059505] env[61905]: } to complete. 
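The ReconfigVM_Task above removes the volume's virtual disk from the VM without a file operation, so the backing VMDK (which belongs to the Cinder volume) survives. A hedged sketch of such a spec built with oslo.vmware's suds client factory; the helper name is illustrative, not Nova's:

    def detach_disk(session, vm_ref, disk_device):
        factory = session.vim.client.factory
        spec = factory.create('ns0:VirtualMachineConfigSpec')
        change = factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'remove'   # drop the device entry only;
        change.device = disk_device   # no fileOperation => keep the VMDK
        spec.deviceChange = [change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=spec)
        session.wait_for_task(task)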
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.069690] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362961, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.368414] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: a6e45dd1-e0ee-4bda-9513-4b1000e15e49] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.569014] env[61905]: DEBUG oslo_vmware.api [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362961, 'name': ReconfigVM_Task, 'duration_secs': 0.129832} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.569336] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290104', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'name': 'volume-176d041a-4619-4a02-8a96-6f7ebfd757ae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '02a40a20-1506-48f2-bbd2-db62e5dfa166', 'attached_at': '', 'detached_at': '', 'volume_id': '176d041a-4619-4a02-8a96-6f7ebfd757ae', 'serial': '176d041a-4619-4a02-8a96-6f7ebfd757ae'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1088.871475] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e1a22f3e-4557-44d2-8e34-cc75f573fe41] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.109544] env[61905]: DEBUG nova.objects.instance [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 02a40a20-1506-48f2-bbd2-db62e5dfa166 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.374824] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: a9ac365e-2be1-438d-a514-6fa7b26fa10c] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.878668] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 12c21d8e-1941-4481-9216-015ba6c09b9b] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.117106] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ac77ee82-7cab-4f04-9ecc-1bd055274a9b tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume"
:: held 3.207s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.382316] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 111d10e8-7e36-48b6-be45-2275c36fbee4] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.885925] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 74f94a46-63e4-44e0-9142-7e7d46cd31a7] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.139733] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.140050] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.140278] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.140535] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.140709] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.142864] env[61905]: INFO nova.compute.manager [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Terminating instance [ 1091.144577] env[61905]: DEBUG nova.compute.manager [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Start
destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1091.144775] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.145613] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f774f2-292c-419f-95b0-0a6c3c212076 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.153332] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.153820] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6740a07c-4328-4993-883e-0e30edbf3ae9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.160576] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1091.160576] env[61905]: value = "task-1362962" [ 1091.160576] env[61905]: _type = "Task" [ 1091.160576] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.168218] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362962, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.389246] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: aeb72a57-d319-479d-a1c7-3cebc6f73f09] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1091.670392] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362962, 'name': PowerOffVM_Task, 'duration_secs': 0.146795} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.670715] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1091.670892] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1091.671150] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b7306f7-9606-4daa-8120-31b097a218e3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.747309] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.747500] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.747813] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleting the datastore file [datastore2] 02a40a20-1506-48f2-bbd2-db62e5dfa166 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.748182] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3789eddb-a458-4eb0-8ef5-ce1fcc74e1f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.755162] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1091.755162] env[61905]: value = "task-1362964" [ 1091.755162] env[61905]: _type = "Task" [ 1091.755162] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.762325] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362964, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.892126] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: ba3a1e36-a9f8-4482-908e-9c949c6f42ec] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1092.264516] env[61905]: DEBUG oslo_vmware.api [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130358} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.264765] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.264947] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.265142] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.265320] env[61905]: INFO nova.compute.manager [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1092.265564] env[61905]: DEBUG oslo.service.loopingcall [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.265758] env[61905]: DEBUG nova.compute.manager [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1092.265852] env[61905]: DEBUG nova.network.neutron [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.395615] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: fed05097-de84-4617-bf9e-7fc116ebc56e] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1092.686059] env[61905]: DEBUG nova.compute.manager [req-1e51fa40-a45c-42e2-9e4e-a80cdaeec1e9 req-1dc2b5f0-b3db-4ed2-a97a-9924b6680597 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Received event network-vif-deleted-3d82d8e0-2624-4d0b-a98b-1cfd93ccf628 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1092.686059] env[61905]: INFO nova.compute.manager [req-1e51fa40-a45c-42e2-9e4e-a80cdaeec1e9 req-1dc2b5f0-b3db-4ed2-a97a-9924b6680597 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Neutron deleted interface 3d82d8e0-2624-4d0b-a98b-1cfd93ccf628; detaching it from the instance and deleting it from the info cache [ 1092.686059] env[61905]: DEBUG nova.network.neutron [req-1e51fa40-a45c-42e2-9e4e-a80cdaeec1e9 req-1dc2b5f0-b3db-4ed2-a97a-9924b6680597 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.900020] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: d31570f0-7662-4e13-9dee-51dc66728acc] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1093.156260] env[61905]: DEBUG nova.network.neutron [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.187514] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e3f9433-6c3f-4022-9688-27ee2dea6f99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.197440] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559f76b6-9a22-4c84-95c3-b55e8942e10c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.221061] env[61905]: DEBUG nova.compute.manager [req-1e51fa40-a45c-42e2-9e4e-a80cdaeec1e9 req-1dc2b5f0-b3db-4ed2-a97a-9924b6680597 service nova] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Detach interface failed, port_id=3d82d8e0-2624-4d0b-a98b-1cfd93ccf628, reason: Instance 02a40a20-1506-48f2-bbd2-db62e5dfa166 could not be found. 
{{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1093.403146] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: a4a03b8a-3206-4684-9d85-0e60ac643175] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1093.658838] env[61905]: INFO nova.compute.manager [-] [instance: 02a40a20-1506-48f2-bbd2-db62e5dfa166] Took 1.39 seconds to deallocate network for instance. [ 1093.907301] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: 60e68738-a333-44b2-a1e8-0b3da728059e] Instance has had 0 of 5 cleanup attempts {{(pid=61905) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1094.167592] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.167592] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.167592] env[61905]: DEBUG nova.objects.instance [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'resources' on Instance uuid 02a40a20-1506-48f2-bbd2-db62e5dfa166 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.410858] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.721724] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c44b44-6908-4e0a-8bf4-191e692531f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.729102] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ecfc10-6102-42c2-8fe4-54eb321c44c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.758021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b564f098-1d5b-4b4e-8aa8-d1cb5a5a1f88 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.765588] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a8d337-fecf-4a30-9390-09e7505d629d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.778541] env[61905]: DEBUG nova.compute.provider_tree [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 
tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.282095] env[61905]: DEBUG nova.scheduler.client.report [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.787552] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.808475] env[61905]: INFO nova.scheduler.client.report [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted allocations for instance 02a40a20-1506-48f2-bbd2-db62e5dfa166 [ 1096.317100] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d3b425e7-d624-4466-90ae-5c4d0eb18a01 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "02a40a20-1506-48f2-bbd2-db62e5dfa166" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.177s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.275035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.275035] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.778769] env[61905]: DEBUG nova.compute.utils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1097.968877] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a
tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.969135] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.281969] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.471618] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1098.989816] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.990100] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.991622] env[61905]: INFO nova.compute.claims [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.342555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.342943] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.343290] env[61905]: INFO nova.compute.manager [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attaching volume d11f18ed-eb44-480b-8a82-489af0b26f5b to /dev/sdb [ 1099.374258] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af4e48b-288e-4422-a5a5-9d20259cd63f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.381327] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c53b203-4bcd-4330-8c72-88a9a73083fd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.394441] env[61905]: DEBUG nova.virt.block_device [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating existing volume attachment record: 75064683-f0c5-4d3a-9984-7a324660fbe0 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1100.045204] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b22d036-6ef9-45fd-b702-b4544db799ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.053886] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d6c5eb-33e7-4763-8cdb-82b02c82a1fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.082452] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc82f54-f2f9-4d1f-a093-5279ef1fadb8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.089453] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e48484-93b1-44c1-985a-db180d8aff32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.102691] env[61905]: DEBUG nova.compute.provider_tree [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.605833] env[61905]: DEBUG nova.scheduler.client.report [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.111155] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.111716] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1101.617617] env[61905]: DEBUG nova.compute.utils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1101.619481] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1101.619652] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1101.671882] env[61905]: DEBUG nova.policy [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '302950aecbc54ee0843853aac306fab2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28c767f45ae54b8fbfe2c93fc9027447', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1101.946260] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Successfully created port: ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.122734] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1103.132372] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1103.156100] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.156369] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.156544] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.156733] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.156880] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.157040] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.157249] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.157407] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.157576] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.157738] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.157911] env[61905]: DEBUG nova.virt.hardware [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.158780] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb86756-88f8-4492-8527-0fa2061201e0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.166555] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54da613-c7f7-4e4d-a788-3bed72898e65 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.330213] env[61905]: DEBUG nova.compute.manager [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Received event network-vif-plugged-ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1103.330439] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.330655] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.330831] env[61905]: DEBUG oslo_concurrency.lockutils [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.331008] env[61905]:
DEBUG nova.compute.manager [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] No waiting events found dispatching network-vif-plugged-ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1103.331184] env[61905]: WARNING nova.compute.manager [req-5bec2026-1430-42c6-a91c-d48141eb60d4 req-025204aa-6fbe-4294-a95b-a98c64a008d9 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Received unexpected event network-vif-plugged-ccea4ae3-1473-40c2-975d-7af1688f089b for instance with vm_state building and task_state spawning. [ 1103.412207] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Successfully updated port: ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1103.915378] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.915577] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.915757] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1103.938451] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Volume attach. 
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1103.938736] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290111', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'name': 'volume-d11f18ed-eb44-480b-8a82-489af0b26f5b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'serial': 'd11f18ed-eb44-480b-8a82-489af0b26f5b'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1103.939757] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b711ed-0684-4c3e-a99a-df7b0a0461a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.956201] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd8806f-74d2-4795-a915-626fda94f81d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.979277] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-d11f18ed-eb44-480b-8a82-489af0b26f5b/volume-d11f18ed-eb44-480b-8a82-489af0b26f5b.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.979544] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e4c8c26-cdd8-4430-aeac-3cd96196e69e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.997239] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1103.997239] env[61905]: value = "task-1362967" [ 1103.997239] env[61905]: _type = "Task" [ 1103.997239] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.007968] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362967, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.447716] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1104.507050] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362967, 'name': ReconfigVM_Task, 'duration_secs': 0.33393} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.509419] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-d11f18ed-eb44-480b-8a82-489af0b26f5b/volume-d11f18ed-eb44-480b-8a82-489af0b26f5b.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.513996] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea232826-d95d-4401-ad5c-70f6489f7a0e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.528960] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1104.528960] env[61905]: value = "task-1362968" [ 1104.528960] env[61905]: _type = "Task" [ 1104.528960] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.542518] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362968, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.604791] env[61905]: DEBUG nova.network.neutron [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating instance_info_cache with network_info: [{"id": "ccea4ae3-1473-40c2-975d-7af1688f089b", "address": "fa:16:3e:5b:d8:cd", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccea4ae3-14", "ovs_interfaceid": "ccea4ae3-1473-40c2-975d-7af1688f089b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.038857] env[61905]: DEBUG oslo_vmware.api [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362968, 'name': ReconfigVM_Task, 'duration_secs': 0.138358} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.039235] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290111', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'name': 'volume-d11f18ed-eb44-480b-8a82-489af0b26f5b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'serial': 'd11f18ed-eb44-480b-8a82-489af0b26f5b'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1105.107852] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.107852] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Instance network_info: |[{"id": "ccea4ae3-1473-40c2-975d-7af1688f089b", "address": "fa:16:3e:5b:d8:cd", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccea4ae3-14", "ovs_interfaceid": "ccea4ae3-1473-40c2-975d-7af1688f089b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1105.107852] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:d8:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccea4ae3-1473-40c2-975d-7af1688f089b', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.115159] 
env[61905]: DEBUG oslo.service.loopingcall [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.115380] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.115596] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b40597ce-0262-4738-8485-38ce833a95c1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.136650] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.136650] env[61905]: value = "task-1362969" [ 1105.136650] env[61905]: _type = "Task" [ 1105.136650] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.144124] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362969, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.359675] env[61905]: DEBUG nova.compute.manager [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Received event network-changed-ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1105.359878] env[61905]: DEBUG nova.compute.manager [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Refreshing instance network info cache due to event network-changed-ccea4ae3-1473-40c2-975d-7af1688f089b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1105.360116] env[61905]: DEBUG oslo_concurrency.lockutils [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] Acquiring lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.360265] env[61905]: DEBUG oslo_concurrency.lockutils [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] Acquired lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.360426] env[61905]: DEBUG nova.network.neutron [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Refreshing network info cache for port ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.646321] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362969, 'name': CreateVM_Task, 'duration_secs': 0.329011} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.647864] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.647864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.647997] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.648331] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1105.648592] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba02de36-f982-49f9-ad70-7b3c7dbf0769 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.652940] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1105.652940] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe0874-5dc1-62d9-168e-b33916655823" [ 1105.652940] env[61905]: _type = "Task" [ 1105.652940] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.660618] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe0874-5dc1-62d9-168e-b33916655823, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.779185] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.779433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.067139] env[61905]: DEBUG nova.network.neutron [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updated VIF entry in instance network info cache for port ccea4ae3-1473-40c2-975d-7af1688f089b. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.067530] env[61905]: DEBUG nova.network.neutron [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating instance_info_cache with network_info: [{"id": "ccea4ae3-1473-40c2-975d-7af1688f089b", "address": "fa:16:3e:5b:d8:cd", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccea4ae3-14", "ovs_interfaceid": "ccea4ae3-1473-40c2-975d-7af1688f089b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.076012] env[61905]: DEBUG nova.objects.instance [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.163440] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id':
session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52fe0874-5dc1-62d9-168e-b33916655823, 'name': SearchDatastore_Task, 'duration_secs': 0.010859} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.163742] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.163974] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.164245] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.164427] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.164693] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.165101] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc0ea0a7-5276-4520-9bd4-95c205a0543c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.172985] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.173183] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.173865] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e740115-0a85-4169-818c-0ba1d57343fa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.178512] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1106.178512] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b74444-7bd2-71eb-0c6c-6eb39e85dfc0" [ 1106.178512] env[61905]: _type = "Task" [ 1106.178512] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.187224] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b74444-7bd2-71eb-0c6c-6eb39e85dfc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.282191] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1106.570435] env[61905]: DEBUG oslo_concurrency.lockutils [req-cacab601-c780-4dc3-9e33-e5546f8d4842 req-f525d863-8dbb-44b6-97b6-331a25081584 service nova] Releasing lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.581388] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1604aeda-1a5b-4d8b-9426-3bbd12b834a8 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.688787] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b74444-7bd2-71eb-0c6c-6eb39e85dfc0, 'name': SearchDatastore_Task, 'duration_secs': 0.008269} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.689653] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-339dae3d-65bb-4dac-82a5-17ed702dcaef {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.694558] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1106.694558] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f24f31-ad12-a8bc-47b6-7bb49278ebc5" [ 1106.694558] env[61905]: _type = "Task" [ 1106.694558] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.701610] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f24f31-ad12-a8bc-47b6-7bb49278ebc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.804831] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.805117] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.806537] env[61905]: INFO nova.compute.claims [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1107.205968] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f24f31-ad12-a8bc-47b6-7bb49278ebc5, 'name': SearchDatastore_Task, 'duration_secs': 0.008749} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.205968] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.205968] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516/9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.206273] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ed49daa-acef-48bf-9424-4408045fa734 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.212007] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1107.212007] env[61905]: value = "task-1362970" [ 1107.212007] env[61905]: _type = "Task" [ 1107.212007] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.219260] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.439218] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.439218] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.721575] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461316} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.721936] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516/9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1107.722032] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1107.722249] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94d50bc2-365f-40f0-b415-dafb37463956 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.728620] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1107.728620] env[61905]: value = "task-1362971" [ 1107.728620] env[61905]: _type = "Task" [ 1107.728620] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.735737] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362971, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.870659] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8344ad-2899-4346-9a45-02fce551bc99 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.877706] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109cc59d-8911-4da8-9dad-b91ea7aeff62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.907230] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0795fdcf-00c5-4db1-8655-60527f71be76 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.913992] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dbd392-7faf-4a47-bf1c-c85230bf3d09 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.926844] env[61905]: DEBUG nova.compute.provider_tree [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.942995] env[61905]: DEBUG nova.compute.utils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1108.237779] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057374} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.238092] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.238870] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388c5311-2761-48ef-a7f4-8aec346d4d04 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.260295] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516/9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.260527] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab372eb-83ef-42ad-a570-364a0a161d75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.279565] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1108.279565] env[61905]: value = "task-1362972" [ 1108.279565] env[61905]: _type = "Task" [ 1108.279565] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.286622] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362972, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.430071] env[61905]: DEBUG nova.scheduler.client.report [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1108.445735] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.789947] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362972, 'name': ReconfigVM_Task, 'duration_secs': 0.264368} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.790339] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516/9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1108.790866] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-491ab836-4667-44d0-92ff-5bb1440ac693 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.797127] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1108.797127] env[61905]: value = "task-1362973" [ 1108.797127] env[61905]: _type = "Task" [ 1108.797127] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.809470] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362973, 'name': Rename_Task} progress is 0%.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.935709] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.936247] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1109.307339] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362973, 'name': Rename_Task, 'duration_secs': 0.127369} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.307606] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1109.307856] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e51b4893-a312-4074-94cf-7a7be7dcd5d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.314605] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1109.314605] env[61905]: value = "task-1362974" [ 1109.314605] env[61905]: _type = "Task" [ 1109.314605] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.321725] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362974, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.442066] env[61905]: DEBUG nova.compute.utils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1109.443606] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Allocating IP information in the background. 
{{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1109.443798] env[61905]: DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.483054] env[61905]: DEBUG nova.policy [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b126f47b9df4f4586f377f70faada62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edbab61ee8984d0c91eab473eba0047c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1109.507009] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.507403] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.507851] env[61905]: INFO nova.compute.manager [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attaching volume aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9 to /dev/sdc [ 1109.546230] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bec2f1-5b4c-43ff-b219-07d5a36ee8f5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.555924] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2c5337-e83b-4081-b0a1-95fbe8b4201e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.569574] env[61905]: DEBUG nova.virt.block_device [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating existing volume attachment record: f4a5e818-8e50-4f90-aba8-99055c4af91b {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1109.749542] env[61905]: DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 
e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Successfully created port: 834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.824252] env[61905]: DEBUG oslo_vmware.api [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1362974, 'name': PowerOnVM_Task, 'duration_secs': 0.456444} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.824622] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1109.824731] env[61905]: INFO nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Took 6.69 seconds to spawn the instance on the hypervisor. [ 1109.824940] env[61905]: DEBUG nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1109.825712] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af75d716-2e0a-4a79-a981-1ecdcfd0a52b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.946584] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1110.343974] env[61905]: INFO nova.compute.manager [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Took 11.37 seconds to build instance. 
[ 1110.396272] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_power_states {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1110.846383] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0b845b41-165e-49ce-9509-f824cfe0bf0a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.877s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.900069] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Getting list of instances from cluster (obj){ [ 1110.900069] env[61905]: value = "domain-c8" [ 1110.900069] env[61905]: _type = "ClusterComputeResource" [ 1110.900069] env[61905]: } {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1110.901189] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b34e2e-7aaa-4c5e-8168-0f4c0c7865c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.916056] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Got total of 3 instances {{(pid=61905) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1110.916232] env[61905]: WARNING nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] While synchronizing instance power states, found 4 instances in the database and 3 instances on the hypervisor. 
[ 1110.916347] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Triggering sync for uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1110.916534] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Triggering sync for uuid bcca8c7b-3e80-4895-ac56-d5aa05d482e5 {{(pid=61905) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1110.916687] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Triggering sync for uuid 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 {{(pid=61905) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1110.916837] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Triggering sync for uuid e2ebe77a-0a93-4d53-bb47-acfef6aeb710 {{(pid=61905) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1110.917183] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.917434] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.917633] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.917901] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.918100] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.918339] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.919166] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ae69b7-5dc1-4fdd-9ab2-2705f6ba987b {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.922240] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78622b4d-9dea-45fc-8432-9f62899a92c1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.956344] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1110.982245] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1110.982566] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1110.982744] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.982935] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1110.983100] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.983323] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1110.983460] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1110.983623] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1110.983808] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1110.983968] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1110.984158] env[61905]: DEBUG nova.virt.hardware [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1110.985027] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d41f486-2950-48d3-9114-d86af01b7284 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.996300] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73cd88e-e137-471b-80d9-348c8efb69af {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.143335] env[61905]: DEBUG nova.compute.manager [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Received event network-vif-plugged-834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1111.143564] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.143797] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.143983] env[61905]: DEBUG oslo_concurrency.lockutils [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] Lock 
"e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.145772] env[61905]: DEBUG nova.compute.manager [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] No waiting events found dispatching network-vif-plugged-834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1111.145772] env[61905]: WARNING nova.compute.manager [req-e6916440-89d5-4cb1-ab43-b7b620e30a78 req-de144023-5584-41ce-8a15-9247ca601616 service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Received unexpected event network-vif-plugged-834d4e50-6879-4fde-9aeb-316ff08637ac for instance with vm_state building and task_state spawning. [ 1111.357849] env[61905]: DEBUG nova.compute.manager [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Received event network-changed-ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1111.358081] env[61905]: DEBUG nova.compute.manager [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Refreshing instance network info cache due to event network-changed-ccea4ae3-1473-40c2-975d-7af1688f089b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1111.358274] env[61905]: DEBUG oslo_concurrency.lockutils [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] Acquiring lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.358414] env[61905]: DEBUG oslo_concurrency.lockutils [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] Acquired lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.358622] env[61905]: DEBUG nova.network.neutron [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Refreshing network info cache for port ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1111.433083] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.515s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.433450] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.515s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.700106] env[61905]: 
DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Successfully updated port: 834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.722741] env[61905]: DEBUG nova.compute.manager [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Received event network-changed-834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1111.722741] env[61905]: DEBUG nova.compute.manager [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Refreshing instance network info cache due to event network-changed-834d4e50-6879-4fde-9aeb-316ff08637ac. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1111.723070] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] Acquiring lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1111.723129] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] Acquired lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.723271] env[61905]: DEBUG nova.network.neutron [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Refreshing network info cache for port 834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1112.075064] env[61905]: DEBUG nova.network.neutron [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updated VIF entry in instance network info cache for port ccea4ae3-1473-40c2-975d-7af1688f089b. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1112.075446] env[61905]: DEBUG nova.network.neutron [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating instance_info_cache with network_info: [{"id": "ccea4ae3-1473-40c2-975d-7af1688f089b", "address": "fa:16:3e:5b:d8:cd", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccea4ae3-14", "ovs_interfaceid": "ccea4ae3-1473-40c2-975d-7af1688f089b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.202753] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.253327] env[61905]: DEBUG nova.network.neutron [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.325300] env[61905]: DEBUG nova.network.neutron [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.578129] env[61905]: DEBUG oslo_concurrency.lockutils [req-a208fa3f-5add-4825-a95b-0a32115e781b req-1e21a479-f065-4d7f-8851-4504e3544c8a service nova] Releasing lock "refresh_cache-9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.828450] env[61905]: DEBUG oslo_concurrency.lockutils [req-8c64e612-c9c5-447d-bcc3-8381bfb3709c req-08236575-1eda-456e-9317-3139ac4bfa0c service nova] Releasing lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.828934] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.829183] env[61905]: DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1113.382227] env[61905]: DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1113.507237] env[61905]: DEBUG nova.network.neutron [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Updating instance_info_cache with network_info: [{"id": "834d4e50-6879-4fde-9aeb-316ff08637ac", "address": "fa:16:3e:61:ea:dd", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap834d4e50-68", "ovs_interfaceid": "834d4e50-6879-4fde-9aeb-316ff08637ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.009599] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-e2ebe77a-0a93-4d53-bb47-acfef6aeb710" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1114.009944] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance network_info: |[{"id": "834d4e50-6879-4fde-9aeb-316ff08637ac", "address": "fa:16:3e:61:ea:dd", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap834d4e50-68", "ovs_interfaceid": "834d4e50-6879-4fde-9aeb-316ff08637ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1114.010396] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:ea:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '834d4e50-6879-4fde-9aeb-316ff08637ac', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1114.017866] env[61905]: DEBUG oslo.service.loopingcall [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1114.018091] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1114.018317] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-131cbd5c-2d2b-44f1-ae8e-5b8b26d715d4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.039447] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1114.039447] env[61905]: value = "task-1362977" [ 1114.039447] env[61905]: _type = "Task" [ 1114.039447] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.050194] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362977, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.112939] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Volume attach. 
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1114.113199] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290113', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'name': 'volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'serial': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1114.114149] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0e55e1-23de-4bd5-9413-25ead14a62f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.130148] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309b40a0-2c9c-47a6-927a-dbcda6f18f45 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.156437] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9/volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.156715] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f88e7e9-fb6e-40e0-997d-cae80e1e9328 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.175571] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1114.175571] env[61905]: value = "task-1362978" [ 1114.175571] env[61905]: _type = "Task" [ 1114.175571] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.186310] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.550066] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362977, 'name': CreateVM_Task, 'duration_secs': 0.308994} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.550437] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1114.550925] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.551112] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.551434] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1114.551713] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02fc877c-fae1-4c6b-a2bf-484c50cfca0d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.556365] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1114.556365] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d32daa-fb2b-6aaf-0286-06882ccb469a" [ 1114.556365] env[61905]: _type = "Task" [ 1114.556365] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.563438] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d32daa-fb2b-6aaf-0286-06882ccb469a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.684982] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362978, 'name': ReconfigVM_Task, 'duration_secs': 0.338397} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.685268] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9/volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.689776] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb52888b-39b7-428f-813e-a45ca4fa8c92 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.704805] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1114.704805] env[61905]: value = "task-1362979" [ 1114.704805] env[61905]: _type = "Task" [ 1114.704805] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.711945] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362979, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.066755] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52d32daa-fb2b-6aaf-0286-06882ccb469a, 'name': SearchDatastore_Task, 'duration_secs': 0.012585} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.067071] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.067303] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1115.067532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1115.067680] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.067853] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1115.068120] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b4ac84a-c276-4f63-bdca-043cef00e086 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.075780] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1115.075973] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1115.076662] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb19cb2b-87d0-4a00-ac0b-27a66b429360 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.083048] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1115.083048] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c05845-0888-9382-1ee9-ff350099937c" [ 1115.083048] env[61905]: _type = "Task" [ 1115.083048] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.090188] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c05845-0888-9382-1ee9-ff350099937c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.214556] env[61905]: DEBUG oslo_vmware.api [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362979, 'name': ReconfigVM_Task, 'duration_secs': 0.128843} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.214789] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290113', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'name': 'volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'serial': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1115.593244] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52c05845-0888-9382-1ee9-ff350099937c, 'name': SearchDatastore_Task, 'duration_secs': 0.008361} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.593984] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf7d0ef-c3d1-42d0-9543-295da838d9ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.598789] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1115.598789] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cec06-80e6-7e44-beaf-4b56dc16729b" [ 1115.598789] env[61905]: _type = "Task" [ 1115.598789] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.606152] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cec06-80e6-7e44-beaf-4b56dc16729b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.109643] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]525cec06-80e6-7e44-beaf-4b56dc16729b, 'name': SearchDatastore_Task, 'duration_secs': 0.00915} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.109916] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1116.110181] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1116.110433] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7939ae2-3e49-413f-9b66-222fc05e51d0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.117642] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1116.117642] env[61905]: value = "task-1362980" [ 1116.117642] env[61905]: _type = "Task" [ 1116.117642] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.253886] env[61905]: DEBUG nova.objects.instance [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.627829] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457312} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.628188] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1116.628259] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1116.628496] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f343a9d5-f534-49d8-971a-ff50b4900664 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.634539] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1116.634539] env[61905]: value = "task-1362981" [ 1116.634539] env[61905]: _type = "Task" [ 1116.634539] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.642262] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362981, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.758714] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c2b548e9-3bca-4fd6-8caa-70399d106f34 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.251s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.759668] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.842s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.760683] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc3b5dd-13aa-4201-b6ee-f119b3bba8fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.069122] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.144696] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205768} completed successfully. 
[ 1117.144914] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1117.145716] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343fedf6-8202-4290-a515-738cec76a728 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.167854] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.168139] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f42767e2-e3f6-4c6f-829b-853c9acab481 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.188569] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1117.188569] env[61905]: value = "task-1362982" [ 1117.188569] env[61905]: _type = "Task" [ 1117.188569] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.197578] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.269879] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.510s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.270177] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.201s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.699286] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362982, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.773482] env[61905]: INFO nova.compute.manager [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Detaching volume d11f18ed-eb44-480b-8a82-489af0b26f5b [ 1117.805778] env[61905]: INFO nova.virt.block_device [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attempting to driver detach volume d11f18ed-eb44-480b-8a82-489af0b26f5b from mountpoint /dev/sdb [ 1117.805778] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Volume detach. Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1117.805778] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290111', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'name': 'volume-d11f18ed-eb44-480b-8a82-489af0b26f5b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'serial': 'd11f18ed-eb44-480b-8a82-489af0b26f5b'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1117.806553] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f3d069-b5e2-470c-9610-2886aeb48f3a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.830838] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b863739-3efd-440d-a0d8-6cc5c486b0b2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.838051] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae85924a-4617-4b84-bc74-55a1c5b816e7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.861768] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4acdb9-085e-4ec8-9f0c-2495c0daaafd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.879050] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] The volume has not been displaced from its original location: [datastore2] volume-d11f18ed-eb44-480b-8a82-489af0b26f5b/volume-d11f18ed-eb44-480b-8a82-489af0b26f5b.vmdk. No consolidation needed. 
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1117.883339] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1117.883690] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fdd8178-51c9-461c-a0b6-d832995b5a95 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.902833] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1117.902833] env[61905]: value = "task-1362983" [ 1117.902833] env[61905]: _type = "Task" [ 1117.902833] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.912536] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362983, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.199803] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362982, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.412660] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362983, 'name': ReconfigVM_Task, 'duration_secs': 0.305913} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.412973] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1118.417535] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca4d0437-2f4b-4492-9f03-cd492e5af223 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.432310] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1118.432310] env[61905]: value = "task-1362984" [ 1118.432310] env[61905]: _type = "Task" [ 1118.432310] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.442702] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362984, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.700061] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362982, 'name': ReconfigVM_Task, 'duration_secs': 1.053404} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.700423] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Reconfigured VM instance instance-0000006b to attach disk [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.700971] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd19d991-000b-448f-ba6e-fd2d4fb161c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.706270] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1118.706270] env[61905]: value = "task-1362985" [ 1118.706270] env[61905]: _type = "Task" [ 1118.706270] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.713241] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362985, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.942209] env[61905]: DEBUG oslo_vmware.api [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362984, 'name': ReconfigVM_Task, 'duration_secs': 0.242161} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.942577] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290111', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'name': 'volume-d11f18ed-eb44-480b-8a82-489af0b26f5b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd11f18ed-eb44-480b-8a82-489af0b26f5b', 'serial': 'd11f18ed-eb44-480b-8a82-489af0b26f5b'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1119.216386] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362985, 'name': Rename_Task, 'duration_secs': 0.126448} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.216609] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1119.216845] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcdaf5e8-0c13-4030-b9a1-f02b10ae3766 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.223666] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1119.223666] env[61905]: value = "task-1362986" [ 1119.223666] env[61905]: _type = "Task" [ 1119.223666] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.235911] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.484689] env[61905]: DEBUG nova.objects.instance [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.733699] env[61905]: DEBUG oslo_vmware.api [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362986, 'name': PowerOnVM_Task, 'duration_secs': 0.444693} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.734042] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.734192] env[61905]: INFO nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Took 8.78 seconds to spawn the instance on the hypervisor. [ 1119.734373] env[61905]: DEBUG nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1119.735177] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8179d1-c122-4e38-8b6b-5d53beb46a97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.252503] env[61905]: INFO nova.compute.manager [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Took 13.46 seconds to build instance. [ 1120.322889] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.491920] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c97f082b-bc66-4f4c-8adc-c744dd70fffe tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.222s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.493497] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.171s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.754832] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0e97281e-6fba-48be-8c9a-a4992aeb0fdf tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.975s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.754832] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 9.836s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.754832] env[61905]: INFO nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] During sync_power_state the instance has a pending task (block_device_mapping). Skip. [ 1120.755271] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.998604] env[61905]: INFO nova.compute.manager [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Detaching volume aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9 [ 1121.028496] env[61905]: INFO nova.virt.block_device [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Attempting to driver detach volume aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9 from mountpoint /dev/sdc [ 1121.028739] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1121.028926] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290113', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'name': 'volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'serial': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1121.029807] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994b32cb-aa39-4611-a144-387a506c1892 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.050372] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383dcf9a-5669-4e95-a56a-e658411f40e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.056686] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4677152f-7ec2-4b67-aed0-983a1a23ab47 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.075457] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e21a674-d896-4ddd-a418-b11d8fe103c9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.089610] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] The volume has not been displaced from its original location: [datastore1] volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9/volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9.vmdk. No consolidation needed. 
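{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}

The "Reconfiguring VM instance ... to detach disk" step that follows is a ReconfigVM_Task whose device-change spec removes the volume's VirtualDisk from the VM with no file operation, so the backing vmdk stays on the datastore (consistent with the "volume has not been displaced" check above). An illustrative pyVmomi equivalent; nova builds the same spec through its suds/oslo.vmware client, and `vm` here is assumed to be an already-located vim.VirtualMachine:

```python
# Illustrative pyVmomi version of the detach-disk reconfigure:
# remove device key 2002 from the VM, keep its vmdk on the datastore.
from pyVmomi import vim

def detach_disk(vm, device_key=2002):
    disk = next(dev for dev in vm.config.hardware.device
                if isinstance(dev, vim.vm.device.VirtualDisk)
                and dev.key == device_key)
    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
    change.device = disk  # no fileOperation: backing file is preserved
    spec = vim.vm.ConfigSpec(deviceChange=[change])
    return vm.ReconfigVM_Task(spec=spec)  # poll like the tasks above
```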
[ 1121.094704] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1121.094981] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44628156-8c97-4824-a5c6-24febe1e2df6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.112881] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1121.112881] env[61905]: value = "task-1362987" [ 1121.112881] env[61905]: _type = "Task" [ 1121.112881] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.122628] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362987, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.339447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.339447] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.339447] env[61905]: DEBUG nova.compute.manager [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1121.339930] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0e7273-202c-4ac2-b37b-a560264db257 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.345965] env[61905]: DEBUG nova.compute.manager [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3368}} [ 1121.346530] env[61905]: DEBUG nova.objects.instance [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'flavor' on Instance uuid e2ebe77a-0a93-4d53-bb47-acfef6aeb710 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.622103] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362987, 'name': ReconfigVM_Task, 'duration_secs': 0.223393} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.622386] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1121.626827] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf657f3d-9ff4-41d1-bb2c-2092c6f60d2b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.640831] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1121.640831] env[61905]: value = "task-1362988" [ 1121.640831] env[61905]: _type = "Task" [ 1121.640831] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.647995] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362988, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.851205] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.851620] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddc4910b-4fed-4968-916d-745c0b76da5e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.858461] env[61905]: DEBUG oslo_vmware.api [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1121.858461] env[61905]: value = "task-1362989" [ 1121.858461] env[61905]: _type = "Task" [ 1121.858461] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.866573] env[61905]: DEBUG oslo_vmware.api [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362989, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.150859] env[61905]: DEBUG oslo_vmware.api [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362988, 'name': ReconfigVM_Task, 'duration_secs': 0.130548} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.151146] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290113', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'name': 'volume-aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c8ad7fcb-4678-40cd-89af-e13de828579b', 'attached_at': '', 'detached_at': '', 'volume_id': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9', 'serial': 'aa6cc5e5-b774-41fa-9d04-e31ee8b6a3b9'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1122.373119] env[61905]: DEBUG oslo_vmware.api [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362989, 'name': PowerOffVM_Task, 'duration_secs': 0.176598} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.373596] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.373596] env[61905]: DEBUG nova.compute.manager [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1122.374302] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f6e119-4504-4b26-a1bf-42a2e42c7e1a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.691294] env[61905]: DEBUG nova.objects.instance [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'flavor' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.889281] env[61905]: DEBUG oslo_concurrency.lockutils [None req-5f4d6b3f-3efb-451a-8816-c3418199ebec tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 1.550s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.404513] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.404513] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... 
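{{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}

The "Running periodic task ComputeManager._reclaim_queued_deletes" lines come from oslo.service's periodic-task machinery: methods decorated as periodic tasks are invoked on a schedule by the service, and the handler itself may no-op on configuration, as the reclaim task does here when reclaim_instance_interval <= 0. A minimal sketch; the spacing and the guard value are illustrative:

```python
# Sketch of the oslo.service periodic-task pattern seen above.
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    # Instantiation takes an oslo.config ConfigOpts: Manager(CONF).
    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        reclaim_interval = 0  # stand-in for CONF.reclaim_instance_interval
        if reclaim_interval <= 0:
            # Matches "CONF.reclaim_instance_interval <= 0, skipping..."
            return
        # ...reclaim soft-deleted instances here...
```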
[ 1123.700065] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ad333bf8-473e-4981-886b-ed0b7335d1d2 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.206s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1123.741319] env[61905]: INFO nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Rebuilding instance [ 1123.779768] env[61905]: DEBUG nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1123.780663] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfc36cb-21ef-4f4c-b445-7b392dd97d56 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.292696] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.293066] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5f45ae-4dee-4ff9-85e1-528d29992cd8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.300227] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1124.300227] env[61905]: value = "task-1362990" [ 1124.300227] env[61905]: _type = "Task" [ 1124.300227] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.310747] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1124.310961] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.311695] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fef5738-d153-448a-ab69-e0a59fcf70b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.317903] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.318132] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d78a76a-d10e-4126-ae0e-aeac7495295e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.378756] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.378756] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.378756] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.379027] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd9a0d10-df78-4574-9842-84eee6e087ab {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.386344] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1124.386344] env[61905]: value = "task-1362992" [ 1124.386344] env[61905]: _type = "Task" [ 1124.386344] env[61905]: } to complete. 
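{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}

The rebuild path above tears the VM down in two steps: UnregisterVM is a plain synchronous call (hence no task id in the log), while deleting the instance directory is a FileManager task (task-1362992 below). A sketch of that sequence using the same kind of session object as in the earlier sketch; the refs are assumed to be already-resolved managed object references:

```python
# Sketch of the unregister-then-delete sequence in the log. vm_ref and
# datacenter_ref are placeholders for resolved managed object refs.
def destroy_on_hypervisor(session, vm_ref, datacenter_ref):
    # Synchronous: removes the VM from vCenter inventory only.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Asynchronous: deletes the instance directory from the datastore.
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710',
        datacenter=datacenter_ref)
    session.wait_for_task(task)
```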
[ 1124.393534] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.694537] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.694757] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.694970] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.695175] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.695349] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.697568] env[61905]: INFO nova.compute.manager [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Terminating instance [ 1124.699235] env[61905]: DEBUG nova.compute.manager [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1124.699427] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.700309] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794a8de8-dec0-4f95-b5fd-c81bf9081c45 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.707631] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.707841] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2eea8dba-bfbd-45a4-be7c-70fec1cb1df9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.714284] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1124.714284] env[61905]: value = "task-1362993" [ 1124.714284] env[61905]: _type = "Task" [ 1124.714284] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.721775] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.896045] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126194} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.896347] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.896573] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1124.896791] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.225306] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362993, 'name': PowerOffVM_Task, 'duration_secs': 0.167296} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.225572] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.225746] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.226009] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-593fa07d-9a94-41f5-81df-aae56fd31d38 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.289557] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.289789] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.289974] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleting the datastore file [datastore1] c8ad7fcb-4678-40cd-89af-e13de828579b 
{{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.290275] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7a67bc2-e046-4d83-8c58-62e2b4931191 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.296532] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for the task: (returnval){ [ 1125.296532] env[61905]: value = "task-1362995" [ 1125.296532] env[61905]: _type = "Task" [ 1125.296532] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.304057] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.404480] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.404676] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1125.806483] env[61905]: DEBUG oslo_vmware.api [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Task: {'id': task-1362995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125453} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.806745] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.806932] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.807131] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.807312] env[61905]: INFO nova.compute.manager [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Took 1.11 seconds to destroy the instance on the hypervisor. 
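The "Waiting for function ... to return" DEBUG lines that follow are emitted from oslo.service's loopingcall module while nova retries network deallocation: RetryDecorator re-invokes a wrapped callable on selected exceptions with growing sleep intervals. A minimal sketch; the exception type and retry parameters are illustrative, not nova's values:

```python
# Sketch of the oslo.service retry wrapper behind the
# "Waiting for function ... to return" DEBUG lines.
from oslo_service import loopingcall

class TransientError(Exception):
    pass

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientError,))
def _deallocate_network_with_retries():
    # Raising TransientError here would trigger a sleep-and-retry;
    # any other exception propagates immediately.
    pass

_deallocate_network_with_retries()
```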
[ 1125.807553] env[61905]: DEBUG oslo.service.loopingcall [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1125.807757] env[61905]: DEBUG nova.compute.manager [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1125.807850] env[61905]: DEBUG nova.network.neutron [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1125.928185] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1125.928482] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1125.928654] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1125.928838] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1125.928984] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1125.929155] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1125.929363] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1125.929522] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1125.929703] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1125.929870] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1125.930059] env[61905]: DEBUG nova.virt.hardware [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1125.930944] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52a7230-6883-4860-8a1e-7171ae40c7c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.938972] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6425b8b0-1dfd-42fd-bc49-7324bf41dda9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.954243] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:ea:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '834d4e50-6879-4fde-9aeb-316ff08637ac', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1125.961446] env[61905]: DEBUG oslo.service.loopingcall [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1125.961716] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1125.961936] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07daf303-33bc-4504-819f-a75e312f6a45 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.983165] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1125.983165] env[61905]: value = "task-1362996" [ 1125.983165] env[61905]: _type = "Task" [ 1125.983165] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.990747] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362996, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.405086] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.464402] env[61905]: DEBUG nova.compute.manager [req-0b55fb61-8b75-49f5-a37d-8c31e37e0a20 req-7d5ff212-bab5-4d40-b941-2d56c0daf087 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Received event network-vif-deleted-f15cfc92-b273-4628-b203-bb8462935ba9 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1126.464595] env[61905]: INFO nova.compute.manager [req-0b55fb61-8b75-49f5-a37d-8c31e37e0a20 req-7d5ff212-bab5-4d40-b941-2d56c0daf087 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Neutron deleted interface f15cfc92-b273-4628-b203-bb8462935ba9; detaching it from the instance and deleting it from the info cache [ 1126.464698] env[61905]: DEBUG nova.network.neutron [req-0b55fb61-8b75-49f5-a37d-8c31e37e0a20 req-7d5ff212-bab5-4d40-b941-2d56c0daf087 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.492935] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1362996, 'name': CreateVM_Task, 'duration_secs': 0.306421} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.493148] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1126.493812] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.493980] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.494319] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1126.494577] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a58282-e56f-4a51-8d52-95c230a90b8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.499377] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1126.499377] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52321037-464a-6950-3eed-882272de493f" [ 1126.499377] env[61905]: _type = "Task" [ 1126.499377] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.506733] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52321037-464a-6950-3eed-882272de493f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.938559] env[61905]: DEBUG nova.network.neutron [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.967136] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bd9312a-4b73-400c-9be2-b1ea420a2bd4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.976433] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db657067-45fb-44a8-88b5-17f936f0d6a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.002343] env[61905]: DEBUG nova.compute.manager [req-0b55fb61-8b75-49f5-a37d-8c31e37e0a20 req-7d5ff212-bab5-4d40-b941-2d56c0daf087 service nova] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Detach interface failed, port_id=f15cfc92-b273-4628-b203-bb8462935ba9, reason: Instance c8ad7fcb-4678-40cd-89af-e13de828579b could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1127.011585] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52321037-464a-6950-3eed-882272de493f, 'name': SearchDatastore_Task, 'duration_secs': 0.009269} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.011918] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.012739] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1127.012739] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1127.012739] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.012739] env[61905]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1127.013096] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75954104-635b-4c2d-aa65-1f1192b45345 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.021515] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1127.021712] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1127.022662] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed90adf3-7027-45fd-965c-0b1b06761186 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.027655] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1127.027655] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52770665-ce47-b057-e3bd-2ea6109c0877" [ 1127.027655] env[61905]: _type = "Task" [ 1127.027655] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.035261] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52770665-ce47-b057-e3bd-2ea6109c0877, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.400341] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.404116] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.404313] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1127.441963] env[61905]: INFO nova.compute.manager [-] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Took 1.63 seconds to deallocate network for instance. [ 1127.539634] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52770665-ce47-b057-e3bd-2ea6109c0877, 'name': SearchDatastore_Task, 'duration_secs': 0.008043} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.540525] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835efd7e-aef0-4f58-9f97-16fca82582b5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.546036] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1127.546036] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5251dbb4-a9d3-f11e-976f-cd2a1a09a55f" [ 1127.546036] env[61905]: _type = "Task" [ 1127.546036] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.554665] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5251dbb4-a9d3-f11e-976f-cd2a1a09a55f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.948627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.948919] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.949171] env[61905]: DEBUG nova.objects.instance [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lazy-loading 'resources' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.055870] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5251dbb4-a9d3-f11e-976f-cd2a1a09a55f, 'name': SearchDatastore_Task, 'duration_secs': 0.009095} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.056153] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1128.056414] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1128.056674] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4236b836-f89b-4230-b5d6-b2fff7c86470 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.063615] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1128.063615] env[61905]: value = "task-1362997" [ 1128.063615] env[61905]: _type = "Task" [ 1128.063615] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.070861] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362997, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.517452] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23412bf8-3c68-4df7-b92e-86d1347cc642 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.525129] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ee8319-6fd9-4138-8bc9-100f1c408aae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.556393] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2218e41-961f-4c5b-8adf-728d509aec5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.563354] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb1df15-a7ae-4881-abce-1c7c0c4a659f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.575331] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362997, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.424161} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.582604] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1128.582834] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1128.583288] env[61905]: DEBUG nova.compute.provider_tree [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.585079] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27e36e9d-9a8a-449f-8244-8fd7043232a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.592306] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1128.592306] env[61905]: value = "task-1362998" [ 1128.592306] env[61905]: _type = "Task" [ 1128.592306] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.600747] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362998, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.086757] env[61905]: DEBUG nova.scheduler.client.report [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1129.102807] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061745} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.103667] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1129.104447] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d60dfe1-7d0c-4b46-84bc-f3dd75a72b4e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.126740] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1129.127207] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6139e35d-1217-4856-929e-1838314dcf00 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.147468] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1129.147468] env[61905]: value = "task-1362999" [ 1129.147468] env[61905]: _type = "Task" [ 1129.147468] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.159531] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362999, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.404750] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.404980] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1129.405042] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1129.591502] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.611548] env[61905]: INFO nova.scheduler.client.report [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Deleted allocations for instance c8ad7fcb-4678-40cd-89af-e13de828579b [ 1129.659788] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1362999, 'name': ReconfigVM_Task, 'duration_secs': 0.448214} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.660098] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Reconfigured VM instance instance-0000006b to attach disk [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710/e2ebe77a-0a93-4d53-bb47-acfef6aeb710.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.660797] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91493f27-4b72-4ab9-ab6e-b088bba02059 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.667942] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1129.667942] env[61905]: value = "task-1363000" [ 1129.667942] env[61905]: _type = "Task" [ 1129.667942] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.675332] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363000, 'name': Rename_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.948463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.948619] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.948763] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1129.948918] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid c8ad7fcb-4678-40cd-89af-e13de828579b {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.119573] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d2f3f32e-d680-4475-af07-f844ebae4776 tempest-AttachVolumeTestJSON-610274873 tempest-AttachVolumeTestJSON-610274873-project-member] Lock "c8ad7fcb-4678-40cd-89af-e13de828579b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.425s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1130.177218] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363000, 'name': Rename_Task, 'duration_secs': 0.141595} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.177436] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1130.177652] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3ac5b46-0eaf-4d6f-93ff-91d22b1af2b4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.185943] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1130.185943] env[61905]: value = "task-1363001" [ 1130.185943] env[61905]: _type = "Task" [ 1130.185943] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.194359] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363001, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.696320] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363001, 'name': PowerOnVM_Task, 'duration_secs': 0.423043} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.696658] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.696838] env[61905]: DEBUG nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1130.697624] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a80aacd-2e0e-4613-9912-4d61df6d2334 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.955107] env[61905]: DEBUG nova.compute.utils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Can not refresh info_cache because instance was not found {{(pid=61905) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1130.974113] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1131.208549] env[61905]: INFO nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] bringing vm to original state: 'stopped' [ 1131.557045] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.060134] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-c8ad7fcb-4678-40cd-89af-e13de828579b" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1132.060600] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: c8ad7fcb-4678-40cd-89af-e13de828579b] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1132.060600] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.215441] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.215827] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.216083] env[61905]: DEBUG nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1132.217415] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93aa111-2e4b-4371-afba-77b5751cc67c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.223928] env[61905]: DEBUG nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61905) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 
1132.226031] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.226306] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-469d404f-353d-4de1-a63b-45381f515d55 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.231487] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1132.231487] env[61905]: value = "task-1363003" [ 1132.231487] env[61905]: _type = "Task" [ 1132.231487] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.238891] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.566852] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.567113] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.567288] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.567489] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.568438] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba680e8-b808-4bb1-825f-eab581e3c118 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.576547] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91124dd-1bbe-4e2d-945b-d1d0cb5e4ba2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.590863] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7793b7a7-8c99-4bbb-b68e-d016b3fa6a61 {{(pid=61905) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.597319] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c4201e-7195-4d0b-b3e1-b53737d59ecf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.626537] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180859MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1132.626701] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.626868] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.741142] env[61905]: DEBUG oslo_vmware.api [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363003, 'name': PowerOffVM_Task, 'duration_secs': 0.180355} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.741420] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.741600] env[61905]: DEBUG nova.compute.manager [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1132.742390] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fe3296-fe30-4d37-a7c7-d63c7aed7571 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.253804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.038s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.651592] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 
192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.651796] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.651927] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e2ebe77a-0a93-4d53-bb47-acfef6aeb710 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.652115] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1133.652256] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1133.696438] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355114b6-d2e8-4832-b9e1-c7aaa2c04641 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.703734] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cdeb1c0-d16c-4084-a97d-4bc6fee79bbc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.733011] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ba8d1d-9065-4114-85fc-078dfc850f96 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.739659] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f062d7d-5db8-460b-95aa-655d91a92911 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.752076] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.762280] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.005579] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.005860] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.006093] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.006289] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.006468] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.009270] env[61905]: INFO nova.compute.manager [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Terminating instance [ 1134.011210] env[61905]: DEBUG nova.compute.manager [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1134.011382] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1134.012278] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a424e7d5-0e2d-4c45-9098-87f20c2ccdff {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.020546] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1134.020842] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94b69f98-e71e-4940-96e2-04a4fb34d524 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.255053] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1134.291820] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.292133] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.292392] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore1] e2ebe77a-0a93-4d53-bb47-acfef6aeb710 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.292685] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1760cec2-600e-4fc0-922a-d8fd2ad96692 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.298854] env[61905]: DEBUG oslo_vmware.api [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 
tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1134.298854] env[61905]: value = "task-1363006" [ 1134.298854] env[61905]: _type = "Task" [ 1134.298854] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.306332] env[61905]: DEBUG oslo_vmware.api [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363006, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.760418] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1134.760627] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.134s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.760912] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.999s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.761106] env[61905]: DEBUG nova.objects.instance [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1134.808734] env[61905]: DEBUG oslo_vmware.api [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12806} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.808983] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.809184] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.809363] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.809535] env[61905]: INFO nova.compute.manager [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Took 0.80 seconds to destroy the instance on the hypervisor. [ 1134.809777] env[61905]: DEBUG oslo.service.loopingcall [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1134.809958] env[61905]: DEBUG nova.compute.manager [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1134.810066] env[61905]: DEBUG nova.network.neutron [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1135.060289] env[61905]: DEBUG nova.compute.manager [req-a6f496b7-8129-4cd0-aa04-040a6f97df2d req-a65753e3-1ff3-4a4d-84f3-f20b9273c70d service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Received event network-vif-deleted-834d4e50-6879-4fde-9aeb-316ff08637ac {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1135.060345] env[61905]: INFO nova.compute.manager [req-a6f496b7-8129-4cd0-aa04-040a6f97df2d req-a65753e3-1ff3-4a4d-84f3-f20b9273c70d service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Neutron deleted interface 834d4e50-6879-4fde-9aeb-316ff08637ac; detaching it from the instance and deleting it from the info cache [ 1135.060533] env[61905]: DEBUG nova.network.neutron [req-a6f496b7-8129-4cd0-aa04-040a6f97df2d req-a65753e3-1ff3-4a4d-84f3-f20b9273c70d service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.509899] env[61905]: DEBUG nova.network.neutron [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.563520] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60e5ec85-f6a9-4bdc-ba01-c14fd441eee2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.573552] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb474b7a-10b4-46af-ac26-cc3b3c2d9138 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.595991] env[61905]: DEBUG nova.compute.manager [req-a6f496b7-8129-4cd0-aa04-040a6f97df2d req-a65753e3-1ff3-4a4d-84f3-f20b9273c70d service nova] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Detach interface failed, port_id=834d4e50-6879-4fde-9aeb-316ff08637ac, reason: Instance e2ebe77a-0a93-4d53-bb47-acfef6aeb710 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1135.769205] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bd316de5-f21f-41dd-9842-ecadcbf05bfa tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.012988] env[61905]: INFO nova.compute.manager [-] [instance: e2ebe77a-0a93-4d53-bb47-acfef6aeb710] Took 1.20 seconds to deallocate network for instance. 
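The terminate sequence just completed (destroy on the hypervisor, then network deallocation) runs under a per-instance lock, and the Acquiring/acquired/released triplets throughout this log come from oslo.concurrency's lockutils wrapper (the inner() frames at lockutils.py:402/407/421). The following is an illustrative sketch of that pattern only, assuming oslo.concurrency is installed; it mirrors how a nested function such as ComputeManager.terminate_instance.<locals>.do_terminate_instance picks up the instance UUID as its lock name, and is not Nova's actual code.

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # lockutils.synchronized wraps the callee in inner(), which emits the
        # "Acquiring lock ..." / "acquired" / "released" DEBUG lines seen above.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Real Nova destroys the VM, deletes its datastore files and
            # deallocates networking inside this critical section.
            print('terminating %s under its per-instance lock' % instance_uuid)

        do_terminate_instance()

    terminate_instance('e2ebe77a-0a93-4d53-bb47-acfef6aeb710')

Because the lock name is the instance UUID, concurrent operations on different instances proceed in parallel while operations on the same instance serialize, which is exactly the waited/held accounting the log records.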
[ 1136.519752] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.520137] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.520275] env[61905]: DEBUG nova.objects.instance [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'resources' on Instance uuid e2ebe77a-0a93-4d53-bb47-acfef6aeb710 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.073979] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d479fe9-bba5-4dd1-b536-2af26b4e8242 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.081580] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53097bbd-023c-4dd3-b537-2d9ef4c1f8f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.112204] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55ffa36-5bbe-43e3-a351-5fe3dd8d36c0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.119196] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25121be9-1c62-43e2-8b15-be200ce1556c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.133074] env[61905]: DEBUG nova.compute.provider_tree [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.637086] env[61905]: DEBUG nova.scheduler.client.report [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.142126] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.622s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.166125] env[61905]: INFO nova.scheduler.client.report [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocations for instance e2ebe77a-0a93-4d53-bb47-acfef6aeb710 [ 1138.673469] env[61905]: DEBUG oslo_concurrency.lockutils [None req-52932117-cb01-4967-8e88-93edb932f50d tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e2ebe77a-0a93-4d53-bb47-acfef6aeb710" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.667s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1141.073903] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1141.074233] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1141.577183] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1142.102060] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.102354] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.103855] env[61905]: INFO nova.compute.claims [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1143.160456] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e3653b-8ab7-471e-9fca-d33ce9e25c49 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.167784] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04d98e6-609f-4ad4-ad76-441301b49f1b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.197687] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248b775e-cff5-41bd-96ef-5b403442cd75 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.204526] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a27bd6-035f-483b-a9b8-2676f403986f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.217431] env[61905]: DEBUG nova.compute.provider_tree [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.721033] env[61905]: DEBUG nova.scheduler.client.report [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1144.226918] env[61905]: DEBUG oslo_concurrency.lockutils 
[None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.227758] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1144.734303] env[61905]: DEBUG nova.compute.utils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1144.734647] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1144.734818] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1144.784413] env[61905]: DEBUG nova.policy [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b126f47b9df4f4586f377f70faada62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edbab61ee8984d0c91eab473eba0047c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1145.055027] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Successfully created port: 52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1145.237968] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1146.247854] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Start spawning the instance on the hypervisor. {{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1146.273922] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=<?>,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T05:11:42Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1146.274243] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1146.274425] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1146.274628] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1146.274784] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1146.274935] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1146.275164] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1146.275328] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1146.275498] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1146.275656] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1146.275823] env[61905]: DEBUG nova.virt.hardware [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1146.276743] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb9ea48-e542-44ee-8942-e4e170e9614f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.284861] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad0d58e-0884-450d-9994-a0fc654c5668 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.419949] env[61905]: DEBUG nova.compute.manager [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Received event network-vif-plugged-52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1146.419949] env[61905]: DEBUG oslo_concurrency.lockutils [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.419949] env[61905]: DEBUG oslo_concurrency.lockutils [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.420063] env[61905]: DEBUG oslo_concurrency.lockutils [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
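The "-events" lock and pop_instance_event frames above are Nova's external-event handshake with Neutron: a spawning thread may register a waiter for network-vif-plugged, and the event handler pops it under the per-instance events lock. Here the event arrives before anyone is waiting, so the next entry logs it as unexpected. A rough stand-in for that coordination, using plain threading rather than Nova's InstanceEvents class (names and structure here are illustrative, not Nova's):

    from threading import Event

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        # Called by the spawning thread before it blocks on ev.wait().
        ev = Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(instance_uuid, event_name):
        # Called from the Neutron-triggered handler. No registered waiter
        # corresponds to the "No waiting events found ... unexpected event"
        # WARNING path in the log below.
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print('Received unexpected event %s' % event_name)
        else:
            ev.set()  # wakes the spawning thread

    pop_instance_event('e56c9f4f-1398-4370-9b31-5ef91acc78f0',
                       'network-vif-plugged-52bb8f28-a061-4639-988e-2d97db166c66')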
[ 1146.420199] env[61905]: DEBUG nova.compute.manager [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] No waiting events found dispatching network-vif-plugged-52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1146.420367] env[61905]: WARNING nova.compute.manager [req-76a067bf-ee55-42b5-a400-59809422b5b3 req-58a3dc94-197d-4c34-a07f-69a5e1997f25 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Received unexpected event network-vif-plugged-52bb8f28-a061-4639-988e-2d97db166c66 for instance with vm_state building and task_state spawning. [ 1146.980567] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Successfully updated port: 52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1147.004524] env[61905]: DEBUG nova.compute.manager [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Received event network-changed-52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1147.004695] env[61905]: DEBUG nova.compute.manager [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Refreshing instance network info cache due to event network-changed-52bb8f28-a061-4639-988e-2d97db166c66. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1147.004911] env[61905]: DEBUG oslo_concurrency.lockutils [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] Acquiring lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.005073] env[61905]: DEBUG oslo_concurrency.lockutils [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] Acquired lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.005243] env[61905]: DEBUG nova.network.neutron [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Refreshing network info cache for port 52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.483731] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.544598] env[61905]: DEBUG nova.network.neutron [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.610442] env[61905]: DEBUG nova.network.neutron [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.112740] env[61905]: DEBUG oslo_concurrency.lockutils [req-4ba063c9-313f-42f7-9b18-4e0e55e3ac62 req-b6a7c063-72f4-424a-a65f-f4ab213ad343 service nova] Releasing lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.113161] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.113319] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.642971] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1148.762284] env[61905]: DEBUG nova.network.neutron [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating instance_info_cache with network_info: [{"id": "52bb8f28-a061-4639-988e-2d97db166c66", "address": "fa:16:3e:a0:6f:17", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bb8f28-a0", "ovs_interfaceid": "52bb8f28-a061-4639-988e-2d97db166c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.265584] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.265891] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance network_info: |[{"id": "52bb8f28-a061-4639-988e-2d97db166c66", "address": "fa:16:3e:a0:6f:17", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bb8f28-a0", "ovs_interfaceid": "52bb8f28-a061-4639-988e-2d97db166c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1149.266380] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:6f:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52bb8f28-a061-4639-988e-2d97db166c66', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.273822] env[61905]: DEBUG oslo.service.loopingcall [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1149.274071] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.274332] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e1c8fd0-35ae-4efd-b550-128a85ffa770 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.296180] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.296180] env[61905]: value = "task-1363008" [ 1149.296180] env[61905]: _type = "Task" [ 1149.296180] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.303579] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363008, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.806928] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363008, 'name': CreateVM_Task, 'duration_secs': 0.306777} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.807316] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1149.807804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.808014] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.808371] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1149.808618] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08e7d57c-35aa-459b-9f56-40cf28d5760a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.813166] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1149.813166] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5291f48b-476a-2309-ffc2-317e7120b2e7" [ 1149.813166] env[61905]: _type = "Task" [ 1149.813166] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.820316] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5291f48b-476a-2309-ffc2-317e7120b2e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.248831] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.249082] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.324311] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5291f48b-476a-2309-ffc2-317e7120b2e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.324605] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.324907] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1150.325170] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.325321] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.325500] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1150.325748] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f33b719-fdc0-4527-9261-ddf7316d1ed2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.333433] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.333602] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1150.334370] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1928fe14-1b3a-4f28-825e-17d4d523fd21 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.339339] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1150.339339] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52512095-9bd6-b55d-119c-36e0bfe0c848" [ 1150.339339] env[61905]: _type = "Task" [ 1150.339339] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.346247] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52512095-9bd6-b55d-119c-36e0bfe0c848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.752607] env[61905]: DEBUG nova.compute.utils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1150.849133] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52512095-9bd6-b55d-119c-36e0bfe0c848, 'name': SearchDatastore_Task, 'duration_secs': 0.008586} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.849871] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b297a469-7bda-40f5-b1eb-772adcdaee1c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.854526] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1150.854526] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206cd45-c95e-6fcb-99bf-3dd98fff6de9" [ 1150.854526] env[61905]: _type = "Task" [ 1150.854526] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.862186] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206cd45-c95e-6fcb-99bf-3dd98fff6de9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.255539] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.365180] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5206cd45-c95e-6fcb-99bf-3dd98fff6de9, 'name': SearchDatastore_Task, 'duration_secs': 0.009239} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
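The SearchDatastore_Task calls above are the VMware driver checking its per-datastore image cache before cloning a root disk; the entries that follow release the cache lock, copy the cached VMDK into the instance folder and extend it to the flavor's root size. A condensed, runnable sketch of that ordering; ensure_root_disk and every helper it calls are hypothetical stand-ins, not Nova's API:

    # Illustrative stubs only; they print the same milestones the log records.
    def search_datastore(path):
        return True  # pretend the image is already cached, as it is here

    def fetch_image_to_cache(image_id, cache_path):
        print('fetching %s into %s' % (image_id, cache_path))

    def copy_virtual_disk(src, dst):
        print('CopyVirtualDisk_Task: %s -> %s' % (src, dst))

    def extend_virtual_disk(path, size_kb):
        print('ExtendVirtualDisk_Task: %s to %d KB' % (path, size_kb))

    def ensure_root_disk(image_id, instance_uuid, root_kb):
        cache = '[datastore2] devstack-image-cache_base/{0}/{0}.vmdk'.format(image_id)
        root = '[datastore2] {0}/{0}.vmdk'.format(instance_uuid)
        if not search_datastore(cache):          # SearchDatastore_Task above
            fetch_image_to_cache(image_id, cache)
        copy_virtual_disk(cache, root)           # CopyVirtualDisk_Task below
        extend_virtual_disk(root, root_kb)       # ExtendVirtualDisk_Task below
        return root

    ensure_root_disk('4d166298-c700-4bc6-8f8f-67684a277053',
                     'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 1048576)

Caching one copy of the image per datastore is what keeps every subsequent boot of the same image down to a single intra-datastore disk copy.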
[ 1151.365456] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.365711] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.366013] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b973b7c6-0eab-4e5d-9f97-d745804c73bd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.374345] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1151.374345] env[61905]: value = "task-1363009" [ 1151.374345] env[61905]: _type = "Task" [ 1151.374345] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.381886] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.884332] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418743} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.884637] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1151.884750] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.884992] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aeab383f-23ab-42fe-a2e8-65a9523242df {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.891068] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1151.891068] env[61905]: value = "task-1363010" [ 1151.891068] env[61905]: _type = "Task" [ 1151.891068] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.899041] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363010, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.323325] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.323600] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.323835] env[61905]: INFO nova.compute.manager [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Attaching volume fa204fb6-fba1-4bdd-854d-b410a5e7d535 to /dev/sdb [ 1152.357098] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a29aac-0376-44cd-8125-c035f4449a1b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.363920] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9780282e-3478-41cc-aac8-c85a8bbf8794 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.378176] env[61905]: DEBUG nova.virt.block_device [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating existing volume attachment record: b03ac3c1-11c4-4634-9d2b-f4214a4f1646 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1152.399884] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363010, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067375} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
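Every "Waiting for the task" / "progress is N%" / "completed successfully" group in this section comes from oslo.vmware polling the vSphere task's TaskInfo until it leaves the running states (wait_for_task at api.py:397, _poll_task at api.py:434/444). The loop below is a behavioural sketch only, with a stubbed TaskInfo and read_task_info; the real implementation drives the poll with a looping call and much richer error translation:

    import time

    class TaskInfo(object):
        """Illustrative stand-in for the vSphere TaskInfo object."""
        def __init__(self, state, progress=0, result=None, error=None):
            self.state, self.progress = state, progress
            self.result, self.error = result, error

    _states = iter([TaskInfo('running', 0), TaskInfo('running', 42),
                    TaskInfo('success', 100, result='ok')])

    def read_task_info(task_ref):
        # Stub: a real client would fetch TaskInfo via the property collector.
        return next(_states)

    def wait_for_task(task_ref, interval=0.1):
        while True:
            info = read_task_info(task_ref)
            if info.state == 'success':
                print("Task: %s completed successfully." % task_ref)
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            print("Task: %s progress is %d%%." % (task_ref, info.progress))
            time.sleep(interval)

    wait_for_task('task-1363010')  # task id taken from the entries above

The short duration_secs values recorded here simply mean most tasks finish within the first one or two poll intervals.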
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.400157] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1152.400919] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f28ebb7-d8b8-4869-8fd8-9812e2c6a8f6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.422209] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.422532] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c657346-fff3-4454-bf1f-6b4578632d73 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.442428] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1152.442428] env[61905]: value = "task-1363011" [ 1152.442428] env[61905]: _type = "Task" [ 1152.442428] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.450052] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.955759] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363011, 'name': ReconfigVM_Task, 'duration_secs': 0.294962} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.956232] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.956812] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8955f84-3bd2-4689-89b1-fab357a96691 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.962640] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1152.962640] env[61905]: value = "task-1363013" [ 1152.962640] env[61905]: _type = "Task" [ 1152.962640] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.970371] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363013, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.471967] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363013, 'name': Rename_Task, 'duration_secs': 0.124517} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.472221] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.472468] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ccb07b0-2c82-4692-8e0e-7489388ece83 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.477646] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1153.477646] env[61905]: value = "task-1363014" [ 1153.477646] env[61905]: _type = "Task" [ 1153.477646] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.484573] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363014, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.988270] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363014, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.487889] env[61905]: DEBUG oslo_vmware.api [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363014, 'name': PowerOnVM_Task, 'duration_secs': 0.564659} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.488288] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1154.488507] env[61905]: INFO nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Took 8.24 seconds to spawn the instance on the hypervisor. [ 1154.488701] env[61905]: DEBUG nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1154.489448] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b04dea-95ea-438a-a217-d6ee4b344cee {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.009797] env[61905]: INFO nova.compute.manager [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Took 12.93 seconds to build instance. [ 1155.354299] env[61905]: DEBUG nova.compute.manager [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Received event network-changed-52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1155.354437] env[61905]: DEBUG nova.compute.manager [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Refreshing instance network info cache due to event network-changed-52bb8f28-a061-4639-988e-2d97db166c66. 
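The spawn that just finished ("Took 8.24 seconds to spawn ... 12.93 seconds to build") is the fixed sequence of vCenter tasks visible in the entries above. A schematic, stdlib-only sketch of that order; _task is a stand-in for invoking the named vSphere method and waiting on it:

    def _task(name, note):
        # Stand-in for one *_Task invocation plus wait_for_task().
        print('Invoking %s  # %s' % (name, note))

    def spawn_from_image_cache():
        _task('CopyVirtualDisk_Task', 'image cache -> instance directory')
        _task('ExtendVirtualDisk_Task', 'grow root disk to 1048576 KB (1 GiB)')
        _task('ReconfigVM_Task', 'attach the copied vmdk to the VM')
        _task('Rename_Task', 'give the VM its final instance name')
        _task('PowerOnVM_Task', 'boot, then re-read the power state')

    spawn_from_image_cache()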
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1155.354663] env[61905]: DEBUG oslo_concurrency.lockutils [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] Acquiring lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.354809] env[61905]: DEBUG oslo_concurrency.lockutils [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] Acquired lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.354969] env[61905]: DEBUG nova.network.neutron [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Refreshing network info cache for port 52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.511996] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9213eb4f-a42f-4c33-89ac-bfe6d1aa873e tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.438s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.063130] env[61905]: DEBUG nova.network.neutron [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updated VIF entry in instance network info cache for port 52bb8f28-a061-4639-988e-2d97db166c66. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1156.063516] env[61905]: DEBUG nova.network.neutron [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating instance_info_cache with network_info: [{"id": "52bb8f28-a061-4639-988e-2d97db166c66", "address": "fa:16:3e:a0:6f:17", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52bb8f28-a0", "ovs_interfaceid": "52bb8f28-a061-4639-988e-2d97db166c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.565985] env[61905]: DEBUG oslo_concurrency.lockutils [req-a1d6d325-b29c-4752-badf-8870e111b835 req-e25cfcc4-17fb-4118-9065-84894485cdbe service nova] Releasing lock "refresh_cache-e56c9f4f-1398-4370-9b31-5ef91acc78f0" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.920714] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Volume attach. 
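The cache record just written is plain nested dicts, so extracting addresses is a few loops. A runnable sketch against a copy of the record above, trimmed to only the fields it reads:

    network_info = [{
        'id': '52bb8f28-a061-4639-988e-2d97db166c66',
        'address': 'fa:16:3e:a0:6f:17',
        'network': {'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{
                'address': '192.168.128.6',
                'floating_ips': [{'address': '10.180.180.237'}],
            }],
        }]},
    }]

    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                floating = [f['address'] for f in ip.get('floating_ips', [])]
                print(vif['address'], ip['address'], floating)
    # fa:16:3e:a0:6f:17 192.168.128.6 ['10.180.180.237']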
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1156.920964] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290117', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'name': 'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'serial': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1156.921936] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b2b429-9f46-4c27-8981-c43ca9c7b4ea {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.938804] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7633a2ef-3b93-4add-afcd-e29434e675de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.962630] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535/volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.962913] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-221bb658-f461-4676-8a01-a581d3177bc5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.980527] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1156.980527] env[61905]: value = "task-1363016" [ 1156.980527] env[61905]: _type = "Task" [ 1156.980527] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.988627] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363016, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.489834] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363016, 'name': ReconfigVM_Task, 'duration_secs': 0.370169} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.490170] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535/volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.494764] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa2d7b98-3425-4658-8864-f09706137f68 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.508790] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1157.508790] env[61905]: value = "task-1363017" [ 1157.508790] env[61905]: _type = "Task" [ 1157.508790] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.516299] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.019048] env[61905]: DEBUG oslo_vmware.api [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363017, 'name': ReconfigVM_Task, 'duration_secs': 0.129582} completed successfully. 
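A vmdk volume attach is two ReconfigVM_Task calls, both visible above: the first (0.37s) adds a thin-provisioned disk device backed by the volume's vmdk; the quick second one (0.13s) appears to record volume metadata on the VM's configuration so the device can be located again at detach time. A schematic sketch; reconfig_vm is a hypothetical stand-in:

    def reconfig_vm(vm, **changes):
        # Stand-in for VirtualMachine.ReconfigVM_Task + wait_for_task().
        print('ReconfigVM_Task on %s: %s' % (vm, changes))

    def attach_vmdk_volume(vm, volume_id, vmdk_path):
        reconfig_vm(vm, add_disk=vmdk_path, disk_type='thin')
        reconfig_vm(vm, extra_config={'volume_id': volume_id})

    attach_vmdk_volume(
        'instance-0000006a',
        'fa204fb6-fba1-4bdd-854d-b410a5e7d535',
        '[datastore1] volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535/'
        'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535.vmdk')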
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.019048] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290117', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'name': 'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'serial': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1159.054629] env[61905]: DEBUG nova.objects.instance [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.559971] env[61905]: DEBUG oslo_concurrency.lockutils [None req-6505ba1a-2f68-4342-aa9c-4fe97bc76ee5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.236s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.769313] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.769595] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.273024] env[61905]: INFO nova.compute.manager [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Detaching volume fa204fb6-fba1-4bdd-854d-b410a5e7d535 [ 1160.302418] env[61905]: INFO nova.virt.block_device [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Attempting to driver detach volume fa204fb6-fba1-4bdd-854d-b410a5e7d535 from mountpoint /dev/sdb [ 1160.302665] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 
9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Volume detach. Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1160.302855] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290117', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'name': 'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'serial': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1160.303759] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603979d2-0948-45c2-9704-e29f6091b636 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.325052] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e06fdb-3a94-4b50-ad65-817c5776fbe5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.331987] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44571396-21c4-4833-9db7-91b0b8a05236 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.351766] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa25cf5f-2bd9-43f5-bda8-7fc142936e4e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.366519] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] The volume has not been displaced from its original location: [datastore1] volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535/volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535.vmdk. No consolidation needed. 
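Before detaching, the driver checks whether the volume's vmdk has been displaced from its original datastore path (for example by a storage migration); only a displaced disk would need consolidating back before the volume is returned to Cinder. The check reduces to a path comparison, sketched here under that assumption:

    def needs_consolidation(original_path, current_path):
        # True only if the backing vmdk no longer lives where the
        # volume record says it should.
        return original_path != current_path

    path = ('[datastore1] volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535/'
            'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535.vmdk')
    print(needs_consolidation(path, path))  # False -> "No consolidation needed"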
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1160.371668] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1160.372032] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30285c13-77b6-490a-bcf7-2510c69096f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.390083] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1160.390083] env[61905]: value = "task-1363018" [ 1160.390083] env[61905]: _type = "Task" [ 1160.390083] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.398955] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363018, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.900188] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363018, 'name': ReconfigVM_Task, 'duration_secs': 0.223005} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.900502] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1160.905157] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-724223e2-b294-408a-a285-240ef1efc2fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.919537] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1160.919537] env[61905]: value = "task-1363019" [ 1160.919537] env[61905]: _type = "Task" [ 1160.919537] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.927401] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363019, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.429523] env[61905]: DEBUG oslo_vmware.api [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363019, 'name': ReconfigVM_Task, 'duration_secs': 0.137667} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.429930] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290117', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'name': 'volume-fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516', 'attached_at': '', 'detached_at': '', 'volume_id': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535', 'serial': 'fa204fb6-fba1-4bdd-854d-b410a5e7d535'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1161.970061] env[61905]: DEBUG nova.objects.instance [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.977510] env[61905]: DEBUG oslo_concurrency.lockutils [None req-9603bdde-59a3-4870-8107-5e7aad1475bc tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.003450] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.003844] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.003946] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1164.004118] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.004297] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.006563] env[61905]: INFO nova.compute.manager [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Terminating instance [ 1164.008324] env[61905]: DEBUG nova.compute.manager [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1164.008530] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1164.009367] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4dad98-5eed-489a-ac7c-538730ad08a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.017142] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1164.017388] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5569720-1e1c-4659-b1eb-07408e9f100e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.023597] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1164.023597] env[61905]: value = "task-1363020" [ 1164.023597] env[61905]: _type = "Task" [ 1164.023597] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.031203] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.533974] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363020, 'name': PowerOffVM_Task, 'duration_secs': 0.216833} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.534283] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.534438] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.534686] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a710410-75f6-48e6-b66b-e59496d37120 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.615771] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1164.615992] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1164.616176] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleting the datastore file [datastore2] 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1164.616445] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca506151-ee00-492d-90c0-39c494694cfa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.622414] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1164.622414] env[61905]: value = "task-1363022" [ 1164.622414] 
env[61905]: _type = "Task" [ 1164.622414] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.629536] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.132272] env[61905]: DEBUG oslo_vmware.api [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126105} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.132629] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1165.132751] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1165.132958] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1165.133148] env[61905]: INFO nova.compute.manager [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1165.133384] env[61905]: DEBUG oslo.service.loopingcall [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1165.133574] env[61905]: DEBUG nova.compute.manager [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1165.133672] env[61905]: DEBUG nova.network.neutron [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1165.552274] env[61905]: DEBUG nova.compute.manager [req-e5871fcf-1d18-4041-bffd-92c613a661b4 req-486b64da-3f6f-45be-922f-60087b9820bc service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Received event network-vif-deleted-ccea4ae3-1473-40c2-975d-7af1688f089b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1165.552569] env[61905]: INFO nova.compute.manager [req-e5871fcf-1d18-4041-bffd-92c613a661b4 req-486b64da-3f6f-45be-922f-60087b9820bc service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Neutron deleted interface ccea4ae3-1473-40c2-975d-7af1688f089b; detaching it from the instance and deleting it from the info cache [ 1165.552654] env[61905]: DEBUG nova.network.neutron [req-e5871fcf-1d18-4041-bffd-92c613a661b4 req-486b64da-3f6f-45be-922f-60087b9820bc service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.023867] env[61905]: DEBUG nova.network.neutron [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.055668] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b32a9f94-4a4b-4380-9ea2-a331e5503318 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.065398] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdf198b-a0d6-452b-9d76-ceeaf6b68c31 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.090012] env[61905]: DEBUG nova.compute.manager [req-e5871fcf-1d18-4041-bffd-92c613a661b4 req-486b64da-3f6f-45be-922f-60087b9820bc service nova] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Detach interface failed, port_id=ccea4ae3-1473-40c2-975d-7af1688f089b, reason: Instance 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1166.526294] env[61905]: INFO nova.compute.manager [-] [instance: 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516] Took 1.39 seconds to deallocate network for instance. 
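The teardown that just completed follows a strict order, all of it visible in the preceding entries: power off the VM, unregister it from vCenter, delete its directory from the datastore, then deallocate the Neutron ports, with the deallocation wrapped in a looping call so transient failures are retried (and an already-deleted port, as in the "could not be found" entry above, is tolerated). A stdlib-only schematic:

    def terminate(instance_uuid):
        for step in ('PowerOffVM_Task',            # 0.22s in the log
                     'UnregisterVM',               # synchronous, no task
                     'DeleteDatastoreFile_Task'):  # removes [datastore2] <uuid>/
            print('%s: %s' % (instance_uuid, step))
        # Network teardown runs under oslo.service's looping call so it
        # can retry rather than fail the whole destroy.
        print('%s: deallocate_for_instance()' % instance_uuid)

    terminate('9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516')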
[ 1167.033660] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.033927] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.034175] env[61905]: DEBUG nova.objects.instance [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'resources' on Instance uuid 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1167.554688] env[61905]: DEBUG nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Refreshing inventories for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1167.568450] env[61905]: DEBUG nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Updating ProviderTree inventory for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1167.568669] env[61905]: DEBUG nova.compute.provider_tree [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Updating inventory in ProviderTree for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1167.578399] env[61905]: DEBUG nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Refreshing aggregate associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, aggregates: None {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1167.594239] env[61905]: 
DEBUG nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Refreshing trait associations for resource provider 9cb855ec-212a-457a-a4ff-55e9d97323b7, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=61905) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1167.636429] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1034277a-7de2-40e9-8762-9eab40d6ffd6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.643988] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7a9ee7-fdb2-4fbc-80a0-a7cca375e118 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.673139] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22856bf6-f1eb-4389-8d56-f2e8419a97c7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.680175] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283b5435-db15-4db7-842a-a1f2ce126c19 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.692757] env[61905]: DEBUG nova.compute.provider_tree [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.195827] env[61905]: DEBUG nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1168.702443] env[61905]: DEBUG oslo_concurrency.lockutils [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.668s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.722677] env[61905]: INFO nova.scheduler.client.report [None req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted allocations for instance 9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516 [ 1169.231221] env[61905]: DEBUG oslo_concurrency.lockutils [None 
req-38f7af68-9e4f-4620-b229-e2bf41c37ef5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "9ec2fe73-b51c-4ce1-bf4e-e4dbcbb4c516" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.227s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.837773] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.838014] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.340599] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Starting instance... {{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1171.861975] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.862315] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.863808] env[61905]: INFO nova.compute.claims [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1172.916103] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3954984-2537-4f0c-856f-9db7f49899cf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.923394] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb91cff-7830-4088-afe0-8926589f0118 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.953506] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dca777f9-7044-4a34-9ceb-8dbda900f4e9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.960521] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794deb4f-4f17-422a-a090-8d25b40d44b1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.972938] env[61905]: DEBUG nova.compute.provider_tree [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.475846] env[61905]: DEBUG nova.scheduler.client.report [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.981022] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.981575] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1174.486512] env[61905]: DEBUG nova.compute.utils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1174.487930] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Allocating IP information in the background. 
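The inventory payload the report client keeps resending is enough to reproduce the scheduler's effective capacity: usable = (total - reserved) * allocation_ratio, with max_unit capping what a single instance may claim. Worked through for the data above:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0,
                 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512,
                      'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0,
                    'allocation_ratio': 1.0, 'max_unit': 149},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable, 'max per instance:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0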
[ 1174.488111] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1174.535630] env[61905]: DEBUG nova.policy [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '302950aecbc54ee0843853aac306fab2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28c767f45ae54b8fbfe2c93fc9027447', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1174.790241] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Successfully created port: 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1174.990717] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1176.000541] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Start spawning the instance on the hypervisor.
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1176.024997] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1176.025282] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1176.025442] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.025624] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1176.025770] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.025916] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1176.026161] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1176.026328] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1176.026496] env[61905]: DEBUG 
nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1176.026660] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1176.026830] env[61905]: DEBUG nova.virt.hardware [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1176.027728] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ce6a8b-b1eb-431e-b24c-03fa4e9f2a22 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.035548] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649472e8-1f1f-49ad-b168-7424e2ab6d6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.153404] env[61905]: DEBUG nova.compute.manager [req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Received event network-vif-plugged-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1176.153624] env[61905]: DEBUG oslo_concurrency.lockutils [req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.153877] env[61905]: DEBUG oslo_concurrency.lockutils [req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.154392] env[61905]: DEBUG oslo_concurrency.lockutils [req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.154616] env[61905]: DEBUG nova.compute.manager [req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] No waiting events found dispatching network-vif-plugged-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1176.154875] env[61905]: WARNING nova.compute.manager
[req-04ad34e6-44df-4fd9-b8e0-05bfcd290eab req-8132ba47-3406-4328-b5cf-a82f9c67d547 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Received unexpected event network-vif-plugged-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 for instance with vm_state building and task_state spawning. [ 1176.237309] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Successfully updated port: 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1176.740330] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.740482] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.740634] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.273225] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1177.406954] env[61905]: DEBUG nova.network.neutron [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updating instance_info_cache with network_info: [{"id": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "address": "fa:16:3e:58:84:67", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59ef522b-b6", "ovs_interfaceid": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.910403] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.910656] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Instance network_info: |[{"id": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "address": "fa:16:3e:58:84:67", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59ef522b-b6", "ovs_interfaceid": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1177.910920] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:84:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59ef522b-b64f-4feb-8ab1-1be5cde2ebb5', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.918315] env[61905]: DEBUG oslo.service.loopingcall [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.918528] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.919112] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6c5be44-3ddc-49ca-8dbe-62f396820a4b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.939215] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.939215] env[61905]: value = "task-1363023" [ 1177.939215] env[61905]: _type = "Task" [ 1177.939215] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.946986] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363023, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.191024] env[61905]: DEBUG nova.compute.manager [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Received event network-changed-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1178.191196] env[61905]: DEBUG nova.compute.manager [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Refreshing instance network info cache due to event network-changed-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5. 
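{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}}

The "Waiting for the task" / "progress is N%" / "completed successfully" records around task-1363023 come from oslo.vmware's task polling, which drives a fixed-interval loop from oslo.service until the vCenter task finishes. A self-contained sketch of that polling pattern; the counter below stands in for a real TaskInfo read:

from oslo_service import loopingcall

state = {'progress': 0}

def _poll_task():
    # One poll tick: read task progress and signal completion by raising
    # LoopingCallDone with the value that wait() should return.
    state['progress'] += 50
    print('progress is %d%%' % state['progress'])
    if state['progress'] >= 100:
        raise loopingcall.LoopingCallDone('completed successfully')

timer = loopingcall.FixedIntervalLoopingCall(_poll_task)
# start() returns an event; wait() blocks until LoopingCallDone fires.
print(timer.start(interval=0.5).wait())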
[ 1178.191527] env[61905]: DEBUG oslo_concurrency.lockutils [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] Acquiring lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.191779] env[61905]: DEBUG oslo_concurrency.lockutils [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] Acquired lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.192070] env[61905]: DEBUG nova.network.neutron [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Refreshing network info cache for port 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1178.449555] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363023, 'name': CreateVM_Task, 'duration_secs': 0.315865} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.449905] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1178.450423] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.450607] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.450927] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1178.451206] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f01512e-8cc9-4404-90e9-699dd95e3c36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.455303] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1178.455303] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522cf18d-7f97-b2fd-01f5-83a5f5965367" [ 1178.455303] env[61905]: _type = "Task" [ 1178.455303] env[61905]: } to complete.
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.462691] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522cf18d-7f97-b2fd-01f5-83a5f5965367, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.879191] env[61905]: DEBUG nova.network.neutron [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updated VIF entry in instance network info cache for port 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1178.879560] env[61905]: DEBUG nova.network.neutron [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updating instance_info_cache with network_info: [{"id": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "address": "fa:16:3e:58:84:67", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59ef522b-b6", "ovs_interfaceid": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.965201] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]522cf18d-7f97-b2fd-01f5-83a5f5965367, 'name': SearchDatastore_Task, 'duration_secs': 0.010121} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.965510] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.965761] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.966019] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1178.966180] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.966364] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.966628] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce87ba18-0aa4-4321-9b87-b6caeabec95f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.975051] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.975180] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1178.975906] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66284448-1f62-4667-8921-757fd48dc1bb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.980688] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1178.980688] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52856a72-3c78-acef-1e61-80b2557c0c6b" [ 1178.980688] env[61905]: _type = "Task" [ 1178.980688] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.988108] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52856a72-3c78-acef-1e61-80b2557c0c6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.382759] env[61905]: DEBUG oslo_concurrency.lockutils [req-132ff529-7b52-43a0-9211-bd123d0f70a7 req-2327d683-88e1-459b-abd7-a9081d1b075e service nova] Releasing lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1179.492338] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52856a72-3c78-acef-1e61-80b2557c0c6b, 'name': SearchDatastore_Task, 'duration_secs': 0.007519} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.494030] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73fe6679-3a57-4c28-85b4-d5da46c6ffcf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.497930] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1179.497930] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bf3b-7811-5d3c-170d-1f10a4737396" [ 1179.497930] env[61905]: _type = "Task" [ 1179.497930] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.505119] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bf3b-7811-5d3c-170d-1f10a4737396, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.008033] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bf3b-7811-5d3c-170d-1f10a4737396, 'name': SearchDatastore_Task, 'duration_secs': 0.009148} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.008669] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.008669] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 3c9eec15-9349-478f-9429-a33cf5cfae0e/3c9eec15-9349-478f-9429-a33cf5cfae0e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.008817] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66bb0b36-1001-43c6-97a7-9b091e70323c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.015262] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1180.015262] env[61905]: value = "task-1363024" [ 1180.015262] env[61905]: _type = "Task" [ 1180.015262] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.022754] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.524812] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472677} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.525196] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore1] 3c9eec15-9349-478f-9429-a33cf5cfae0e/3c9eec15-9349-478f-9429-a33cf5cfae0e.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.525285] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.525486] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fec8b523-ea41-46ff-96ca-1e597b0b6b7e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.531849] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1180.531849] env[61905]: value = "task-1363025" [ 1180.531849] env[61905]: _type = "Task" [ 1180.531849] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.539140] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.041184] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114323} completed successfully. 
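{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}

The "Extending root virtual disk to 1048576" figure above is the m1.nano flavor's root_gb=1 (from the flavor record earlier in this build) converted at 1024 x 1024, i.e. 1 GiB expressed in KiB, the unit the vmwareapi driver hands to ExtendVirtualDisk_Task. The arithmetic, using only values from this log:

# root_gb=1 from the flavor; 1 GiB = 1024 * 1024 KiB = 1048576,
# the exact size seen in the Extend call above.
root_gb = 1
assert root_gb * 1024 * 1024 == 1048576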
[ 1181.041489] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1181.042265] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8e6e1b-c04c-40b9-966f-a80f0aa8c743 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.063426] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 3c9eec15-9349-478f-9429-a33cf5cfae0e/3c9eec15-9349-478f-9429-a33cf5cfae0e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1181.063664] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10070c1a-aa4f-459b-bad5-0b1650935b08 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.083411] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1181.083411] env[61905]: value = "task-1363026" [ 1181.083411] env[61905]: _type = "Task" [ 1181.083411] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.090926] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.594121] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363026, 'name': ReconfigVM_Task, 'duration_secs': 0.273365} completed successfully.
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.594561] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 3c9eec15-9349-478f-9429-a33cf5cfae0e/3c9eec15-9349-478f-9429-a33cf5cfae0e.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.595312] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2561c6b-8f66-4967-a690-4c701130a93c {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.602179] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1181.602179] env[61905]: value = "task-1363027" [ 1181.602179] env[61905]: _type = "Task" [ 1181.602179] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.609393] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363027, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.112117] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363027, 'name': Rename_Task, 'duration_secs': 0.146035} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.112439] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.112691] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22af7662-6b6c-42fa-8f5f-e25820c7fa33 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.118908] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1182.118908] env[61905]: value = "task-1363028" [ 1182.118908] env[61905]: _type = "Task" [ 1182.118908] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.125959] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363028, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.628889] env[61905]: DEBUG oslo_vmware.api [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363028, 'name': PowerOnVM_Task, 'duration_secs': 0.431345} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.629284] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1182.629381] env[61905]: INFO nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Took 6.63 seconds to spawn the instance on the hypervisor. [ 1182.629545] env[61905]: DEBUG nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1182.630317] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a1ea31-edc7-47b9-8d7d-a191385cf721 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.147336] env[61905]: INFO nova.compute.manager [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Took 11.30 seconds to build instance. [ 1183.649314] env[61905]: DEBUG oslo_concurrency.lockutils [None req-79192e5b-f9af-409d-af07-f471502674b2 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.811s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1183.999749] env[61905]: DEBUG nova.compute.manager [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Received event network-changed-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1183.999907] env[61905]: DEBUG nova.compute.manager [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Refreshing instance network info cache due to event network-changed-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5.
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1184.000155] env[61905]: DEBUG oslo_concurrency.lockutils [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] Acquiring lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1184.000329] env[61905]: DEBUG oslo_concurrency.lockutils [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] Acquired lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.000497] env[61905]: DEBUG nova.network.neutron [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Refreshing network info cache for port 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1184.707884] env[61905]: DEBUG nova.network.neutron [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updated VIF entry in instance network info cache for port 59ef522b-b64f-4feb-8ab1-1be5cde2ebb5. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.708324] env[61905]: DEBUG nova.network.neutron [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updating instance_info_cache with network_info: [{"id": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "address": "fa:16:3e:58:84:67", "network": {"id": "10109894-8fb6-4c2b-81c6-86d97b5bb96a", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1027369737-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28c767f45ae54b8fbfe2c93fc9027447", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59ef522b-b6", "ovs_interfaceid": "59ef522b-b64f-4feb-8ab1-1be5cde2ebb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.211559] env[61905]: DEBUG oslo_concurrency.lockutils [req-a3447b7c-6821-4222-8ebc-e4c93b3a3377 req-becc4710-4198-4f43-90c0-a4dea99cde28 service nova] Releasing lock "refresh_cache-3c9eec15-9349-478f-9429-a33cf5cfae0e" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.106810] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic 
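task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}

The burst of "Running periodic task ComputeManager._*" records here comes from oslo.service's periodic_task machinery: the compute manager subclasses PeriodicTasks, and each decorated method is fired on its own spacing whenever run_periodic_tasks() ticks. A minimal sketch, assuming a 10-second spacing for illustration:

from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    # run_immediately=True fires the task on the first tick instead of
    # waiting out the initial spacing interval.
    @periodic_task.periodic_task(spacing=10, run_immediately=True)
    def _poll_rebooting_instances(self, context):
        print('Running periodic task _poll_rebooting_instances')

# A service normally calls this from a timer loop; one manual tick here.
Manager().run_periodic_tasks(None)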
[ 1188.107196] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.107196] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.107324] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.107459] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1188.400555] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.400731] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1188.906180] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.404819] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.405237] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1189.947106] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.947267] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.947419] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Forcefully refreshing network info cache for instance {{(pid=61905)
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1191.161330] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.663765] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.663975] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1191.664220] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.776775] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.777045] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.280495] env[61905]: DEBUG nova.compute.utils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1193.404610] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1193.783625] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.907760] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.908031] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.908123] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.908282] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1193.909192] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5065a1a-8779-492b-8a89-7689c1353b1f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.917210] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658110bd-d119-4425-a8ff-8fe8990e0c41 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.930981] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea2c986-e445-4f90-93df-6cb10a63c108 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.937574] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5362396e-465a-43b4-8277-db318581f77b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.965618] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None]
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181059MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1193.965764] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.965950] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.841097] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.841484] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.841714] env[61905]: INFO nova.compute.manager [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Attaching volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff to /dev/sdb [ 1194.870615] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7aabc-319f-445d-9409-a4f8f053e24b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.879157] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68fb4b8-4b5d-42e5-ae7c-47dc7c1ee8f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.891597] env[61905]: DEBUG nova.virt.block_device [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating existing volume attachment record: ba164eb9-ecef-41db-baca-7c00cf747778 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1194.990809] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.990969] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance e56c9f4f-1398-4370-9b31-5ef91acc78f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.991107] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 3c9eec15-9349-478f-9429-a33cf5cfae0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.991284] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1194.991423] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1195.038172] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728d80fd-3d72-4164-baa1-a4c3dddd7176 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.045896] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24851536-4e47-4e79-9357-8f0b36147ead {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.074662] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b124b83-36dc-491f-b22b-682c14847376 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.082069] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dce77d3-efcd-4442-82da-7df6014fdad8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.095209] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.598151] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1196.103461] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1196.103887] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.138s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.435231] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Volume attach. Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1199.435544] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1199.436423] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a20fc6e-c2d4-4474-8bd4-1397729c643e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.453902] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc64af3-3e84-46d1-879b-67b528db7cde {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.477296] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.477550] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b99c9708-b5af-4476-8a6e-51c0f8d96816 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.495873] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for 
the task: (returnval){ [ 1199.495873] env[61905]: value = "task-1363033" [ 1199.495873] env[61905]: _type = "Task" [ 1199.495873] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.503655] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363033, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.005710] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363033, 'name': ReconfigVM_Task, 'duration_secs': 0.347575} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.006098] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.010622] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d5f54e2-9575-4307-9360-d76c2efc0f98 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.024608] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1200.024608] env[61905]: value = "task-1363034" [ 1200.024608] env[61905]: _type = "Task" [ 1200.024608] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.031833] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.534924] env[61905]: DEBUG oslo_vmware.api [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363034, 'name': ReconfigVM_Task, 'duration_secs': 0.133747} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.535249] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1201.571039] env[61905]: DEBUG nova.objects.instance [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'flavor' on Instance uuid e56c9f4f-1398-4370-9b31-5ef91acc78f0 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.079157] env[61905]: DEBUG oslo_concurrency.lockutils [None req-f94688f0-09f9-4999-ad17-75e0ee3f7cee tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.238s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.412547] env[61905]: INFO nova.compute.manager [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Rebuilding instance [ 1202.451926] env[61905]: DEBUG nova.compute.manager [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1202.452904] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993de82-bea8-4536-8556-b9c3acc24da6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.964267] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1202.964607] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5c29af6-afe3-47cc-be0b-c47f739c26d1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.971872] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1202.971872] env[61905]: value = 
"task-1363035" [ 1202.971872] env[61905]: _type = "Task" [ 1202.971872] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.979614] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.481510] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363035, 'name': PowerOffVM_Task, 'duration_secs': 0.199527} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.481844] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.536944] env[61905]: INFO nova.compute.manager [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Detaching volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff [ 1203.565667] env[61905]: INFO nova.virt.block_device [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Attempting to driver detach volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff from mountpoint /dev/sdb [ 1203.565917] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1203.566115] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1203.566963] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3937f86-f928-4c86-b68f-8d979dc833e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.588149] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252e6c0b-e792-49d3-8f07-9354785964aa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.594815] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9818edf-c643-4499-b7c1-dbb4c04291c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.614249] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755b2886-96ef-4897-8218-737bc1549723 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.627933] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] The volume has not been displaced from its original location: [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk. No consolidation needed. 
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1203.632952] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1203.633198] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ecc1c17-659d-44f4-9cae-cfbc7863072b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.649713] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1203.649713] env[61905]: value = "task-1363036" [ 1203.649713] env[61905]: _type = "Task" [ 1203.649713] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.656787] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363036, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.158861] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363036, 'name': ReconfigVM_Task, 'duration_secs': 0.179507} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.159147] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1204.163902] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcae5aaf-57c4-4eaf-af3e-aa3e71690dd6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.179184] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1204.179184] env[61905]: value = "task-1363037" [ 1204.179184] env[61905]: _type = "Task" [ 1204.179184] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.186836] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363037, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.690685] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363037, 'name': ReconfigVM_Task, 'duration_secs': 0.136284} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.690998] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1205.736250] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1205.736578] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a1a1eab-ddc9-46f1-a965-3932f96909ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.743803] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1205.743803] env[61905]: value = "task-1363038" [ 1205.743803] env[61905]: _type = "Task" [ 1205.743803] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.751181] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363038, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.254438] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] VM already powered off {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1206.254661] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1206.254853] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1206.255598] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5620ecd3-b5a2-4e4f-a525-308efef19e6d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.273447] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3d48e5-c3e0-41cc-a067-0d2bdb4fb2a7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.279758] env[61905]: WARNING nova.virt.vmwareapi.driver [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1206.280065] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1206.280758] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5bd2ae-447c-49ba-936a-c8bdc6047345 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.286867] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1206.287092] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0e27792-9555-4e25-be7c-24805ee5cdb0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.395849] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1206.396094] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1206.396275] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1206.396550] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f61e145d-0099-44b1-801b-9211ed61f2c3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.402917] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1206.402917] env[61905]: value = "task-1363040" [ 1206.402917] env[61905]: _type = "Task" [ 1206.402917] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.409971] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363040, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.913029] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146563} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.913410] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1206.913475] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1206.913595] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1207.418885] env[61905]: INFO nova.virt.block_device [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Booting with volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff at /dev/sdb [ 1207.447893] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0b89de2-a39d-4c49-8c04-f1a0cb4c4280 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.457023] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff11e976-1e83-4327-83db-15cfd708424f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.480159] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da852c38-f68e-4929-b551-b24a71091a81 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.487709] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfbec2e-26f1-4264-87d6-b4c88a87b35a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.511091] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99814cff-15a9-4933-886c-fe23e29aeda0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.517106] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9420ae5-a72f-48b6-a1a9-87044e7d4040 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.529462] env[61905]: DEBUG nova.virt.block_device [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating existing volume attachment record: c4f7f6c8-598e-4ec4-9cdc-61684b013db6 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1209.627929] env[61905]: DEBUG 
nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1209.627929] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1209.627929] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.627929] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1209.627929] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.627929] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1209.628697] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1209.628697] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1209.628697] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 
tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1209.628697] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1209.628935] env[61905]: DEBUG nova.virt.hardware [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1209.629677] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b629ed-d55d-4396-bf2d-a7000e066828 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.637397] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74666c7c-a2e3-47d1-b48b-563600f6c1e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.651056] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:6f:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52bb8f28-a061-4639-988e-2d97db166c66', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.658258] env[61905]: DEBUG oslo.service.loopingcall [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1209.658497] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.658711] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76129ee5-4456-4c1b-9639-ec41265b1f17 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.677424] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.677424] env[61905]: value = "task-1363041" [ 1209.677424] env[61905]: _type = "Task" [ 1209.677424] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.684318] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363041, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.186729] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363041, 'name': CreateVM_Task, 'duration_secs': 0.295024} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.186977] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.187804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.187942] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.188322] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1210.188605] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2d81109-93a4-4ad3-9f27-2089af08ba80 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.192695] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1210.192695] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529aa754-7585-79a9-41a1-8a560fccfa5d" [ 1210.192695] env[61905]: _type = "Task" [ 1210.192695] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.199831] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529aa754-7585-79a9-41a1-8a560fccfa5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.705253] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]529aa754-7585-79a9-41a1-8a560fccfa5d, 'name': SearchDatastore_Task, 'duration_secs': 0.00838} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.705671] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.706053] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.706427] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.706692] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.706995] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.707359] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13544c94-fb77-42cd-9c23-6dbc5da950cb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.715977] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.716177] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.716869] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-563934a5-8151-4a03-b028-5089fe83664a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.721672] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1210.721672] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6c689-e332-01cf-9e3a-d25dd73a8beb" [ 1210.721672] env[61905]: _type = "Task" [ 1210.721672] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.730369] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6c689-e332-01cf-9e3a-d25dd73a8beb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.232184] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52b6c689-e332-01cf-9e3a-d25dd73a8beb, 'name': SearchDatastore_Task, 'duration_secs': 0.008126} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.232991] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0690127d-cd42-4720-b812-f3a4e63db70d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.237958] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1211.237958] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528c9efd-b56f-80d5-5fe0-06370b388d62" [ 1211.237958] env[61905]: _type = "Task" [ 1211.237958] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.245264] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528c9efd-b56f-80d5-5fe0-06370b388d62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.749922] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]528c9efd-b56f-80d5-5fe0-06370b388d62, 'name': SearchDatastore_Task, 'duration_secs': 0.009654} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.749922] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.749922] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1211.749922] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2282092f-0df3-49c3-b20c-e27dd421548f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.755256] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1211.755256] env[61905]: value = "task-1363042" [ 1211.755256] env[61905]: _type = "Task" [ 1211.755256] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.762188] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.265249] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363042, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445394} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.265494] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1212.265700] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1212.265950] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f2540ec-8d1b-4c16-afd0-e7811cd76cec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.272380] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1212.272380] env[61905]: value = "task-1363043" [ 1212.272380] env[61905]: _type = "Task" [ 1212.272380] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.279809] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363043, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.781900] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066653} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.782262] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1212.783021] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a5c482-9c55-4b9d-829c-1bce251c6b5d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.804231] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1212.804481] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e77e8a3-abd7-4d34-b3d9-8f0e2a26fe98 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.822507] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1212.822507] env[61905]: value = "task-1363044" [ 1212.822507] env[61905]: _type = "Task" [ 1212.822507] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.829558] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363044, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.332685] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363044, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.833100] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363044, 'name': ReconfigVM_Task, 'duration_secs': 0.889723} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.833506] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0/e56c9f4f-1398-4370-9b31-5ef91acc78f0.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1213.834821] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'device_name': '/dev/sda', 'disk_bus': None, 'device_type': 'disk', 'size': 0, 'boot_index': 0, 'guest_format': None, 'encryption_format': None, 'encrypted': False, 'encryption_secret_uuid': None, 'image_id': '4d166298-c700-4bc6-8f8f-67684a277053'}], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'attachment_id': 'c4f7f6c8-598e-4ec4-9cdc-61684b013db6', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'}, 'device_type': None, 'boot_index': None, 'mount_device': '/dev/sdb', 'guest_format': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=61905) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1213.835040] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Volume attach. 
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1213.835235] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1213.835963] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81954e96-63c3-4666-9f31-e959e1055d28 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.850328] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f11c2c-3a69-4acd-89b5-5c0255cb9fbe {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.874133] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.874360] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-956bbd55-827e-4613-9020-636dd1c86ba0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.892009] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1213.892009] env[61905]: value = "task-1363045" [ 1213.892009] env[61905]: _type = "Task" [ 1213.892009] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.899290] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363045, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.401645] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363045, 'name': ReconfigVM_Task, 'duration_secs': 0.281018} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.401895] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.406472] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfaa76b4-a573-4a90-b9a1-27799c2f7938 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.420485] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1214.420485] env[61905]: value = "task-1363046" [ 1214.420485] env[61905]: _type = "Task" [ 1214.420485] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.427773] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363046, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.930687] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363046, 'name': ReconfigVM_Task, 'duration_secs': 0.159713} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.931111] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1214.931625] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6311768-ebb9-4044-bfe2-5278c8e9154b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.937965] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1214.937965] env[61905]: value = "task-1363047" [ 1214.937965] env[61905]: _type = "Task" [ 1214.937965] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.945941] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363047, 'name': Rename_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.447521] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363047, 'name': Rename_Task, 'duration_secs': 0.15385} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.447785] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1215.448056] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ee57f65-a7ce-4136-ad83-fcec294e92f2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.454129] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1215.454129] env[61905]: value = "task-1363048" [ 1215.454129] env[61905]: _type = "Task" [ 1215.454129] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.463862] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363048, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.964437] env[61905]: DEBUG oslo_vmware.api [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363048, 'name': PowerOnVM_Task, 'duration_secs': 0.43989} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.964746] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1215.964963] env[61905]: DEBUG nova.compute.manager [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1215.965758] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001a2319-eb31-4182-a38d-6faefa175cc0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.487990] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1216.488289] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.488472] env[61905]: DEBUG nova.objects.instance [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61905) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1217.359167] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.359576] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.500986] env[61905]: DEBUG oslo_concurrency.lockutils [None req-c63cd13f-7d55-4d83-9b63-66b62edc8f0a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.862955] env[61905]: INFO nova.compute.manager [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Detaching volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff [ 1217.893242] env[61905]: INFO nova.virt.block_device [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Attempting to driver detach volume 6d302b2b-62f2-4b93-9da0-7474fc28d6ff from mountpoint /dev/sdb [ 1217.894087] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Volume detach. 
Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1217.894087] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1217.894967] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d381b7f-4f2b-4bc3-ae7b-227a9995642d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.915345] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91aa120c-33b1-4f61-9df7-531cd18650c2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.922014] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89c5496-1d4d-4c35-b6fc-1e35b515da62 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.940987] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe60f8bf-f4d9-4b45-a38e-5b45a66aa0c0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.954992] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] The volume has not been displaced from its original location: [datastore2] volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff/volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff.vmdk. No consolidation needed. 
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1217.960107] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1217.960396] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a8c3ba4-07d0-49df-b63f-d07e37c28af4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.978064] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1217.978064] env[61905]: value = "task-1363049" [ 1217.978064] env[61905]: _type = "Task" [ 1217.978064] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.985340] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363049, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.487712] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363049, 'name': ReconfigVM_Task, 'duration_secs': 0.361506} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.488067] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1218.492506] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-210295fd-6910-48fc-8ce1-a72767284995 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.507404] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1218.507404] env[61905]: value = "task-1363050" [ 1218.507404] env[61905]: _type = "Task" [ 1218.507404] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.514807] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363050, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.016853] env[61905]: DEBUG oslo_vmware.api [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363050, 'name': ReconfigVM_Task, 'duration_secs': 0.140993} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.017168] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290120', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'name': 'volume-6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e56c9f4f-1398-4370-9b31-5ef91acc78f0', 'attached_at': '', 'detached_at': '', 'volume_id': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff', 'serial': '6d302b2b-62f2-4b93-9da0-7474fc28d6ff'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1219.558712] env[61905]: DEBUG nova.objects.instance [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'flavor' on Instance uuid e56c9f4f-1398-4370-9b31-5ef91acc78f0 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.568052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-b698a22d-662d-4c04-bee1-2fbe44282131 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.179109] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.179358] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.603283] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.603640] 
env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.603766] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.603955] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.604143] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.606330] env[61905]: INFO nova.compute.manager [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Terminating instance [ 1221.608231] env[61905]: DEBUG nova.compute.manager [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Start destroying the instance on the hypervisor. 
{{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1221.608424] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.609256] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfc67ce-08f6-4246-8e7c-247e55f6f7ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.616976] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.617237] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-772407eb-f3cc-4b4a-ab44-e88a081b66c5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.622814] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1221.622814] env[61905]: value = "task-1363051" [ 1221.622814] env[61905]: _type = "Task" [ 1221.622814] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.629981] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363051, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.682164] env[61905]: DEBUG nova.compute.utils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1222.132501] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363051, 'name': PowerOffVM_Task, 'duration_secs': 0.192915} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.132806] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.132979] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.133256] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c66eb663-5df2-4508-929a-3917069aac32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.184596] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.197622] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.197831] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.198021] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore2] e56c9f4f-1398-4370-9b31-5ef91acc78f0 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.198286] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26905dcc-48fe-4a3e-8171-f33beda8a27f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.204032] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1222.204032] env[61905]: value = "task-1363053" [ 1222.204032] env[61905]: _type = "Task" [ 1222.204032] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.211837] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.714352] env[61905]: DEBUG oslo_vmware.api [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161234} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.714648] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.714801] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.714952] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.715157] env[61905]: INFO nova.compute.manager [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1222.715396] env[61905]: DEBUG oslo.service.loopingcall [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.715586] env[61905]: DEBUG nova.compute.manager [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1222.715679] env[61905]: DEBUG nova.network.neutron [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1223.134728] env[61905]: DEBUG nova.compute.manager [req-fbfb4536-9acc-4f53-a4bd-8912dc80a7ca req-9d41eb0b-b301-4a5b-8621-dde72c7891f5 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Received event network-vif-deleted-52bb8f28-a061-4639-988e-2d97db166c66 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1223.134940] env[61905]: INFO nova.compute.manager [req-fbfb4536-9acc-4f53-a4bd-8912dc80a7ca req-9d41eb0b-b301-4a5b-8621-dde72c7891f5 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Neutron deleted interface 52bb8f28-a061-4639-988e-2d97db166c66; detaching it from the instance and deleting it from the info cache [ 1223.135150] env[61905]: DEBUG nova.network.neutron [req-fbfb4536-9acc-4f53-a4bd-8912dc80a7ca req-9d41eb0b-b301-4a5b-8621-dde72c7891f5 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.251429] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1223.251877] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1223.252151] env[61905]: INFO nova.compute.manager [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Attaching volume cae5633c-4a4e-4d4e-99ec-aaf8de34fdee to /dev/sdb [ 1223.290163] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf23d21-3643-4da3-9823-ef25183af814 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.297828] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441cf21b-3662-4082-bd60-06083ce586a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.312424] env[61905]: DEBUG nova.virt.block_device [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] 
[instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updating existing volume attachment record: 9b592a62-0a90-4bd0-b243-634ad35b8ae2 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1223.610149] env[61905]: DEBUG nova.network.neutron [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.638786] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91980821-c2bd-4c51-aea7-0c24b12bd48b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.648848] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7232b179-ce4e-42c2-be60-4c248e7a46ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.673724] env[61905]: DEBUG nova.compute.manager [req-fbfb4536-9acc-4f53-a4bd-8912dc80a7ca req-9d41eb0b-b301-4a5b-8621-dde72c7891f5 service nova] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Detach interface failed, port_id=52bb8f28-a061-4639-988e-2d97db166c66, reason: Instance e56c9f4f-1398-4370-9b31-5ef91acc78f0 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1224.113414] env[61905]: INFO nova.compute.manager [-] [instance: e56c9f4f-1398-4370-9b31-5ef91acc78f0] Took 1.40 seconds to deallocate network for instance. [ 1224.619601] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.619927] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.620169] env[61905]: DEBUG nova.objects.instance [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'resources' on Instance uuid e56c9f4f-1398-4370-9b31-5ef91acc78f0 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.174999] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906cc2a8-e8cf-4a5e-8d66-acca4a61cb70 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.182626] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496b2431-b413-4046-a5b0-e8898f0ca3eb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.212318] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cef280e-f7b3-410f-b301-a0278be8e1aa {{(pid=61905) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.219154] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62843bf2-b6d7-4da6-b478-359e87eb27e2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.239921] env[61905]: DEBUG nova.compute.provider_tree [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.743869] env[61905]: DEBUG nova.scheduler.client.report [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1226.248463] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.271581] env[61905]: INFO nova.scheduler.client.report [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocations for instance e56c9f4f-1398-4370-9b31-5ef91acc78f0 [ 1226.779678] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d06574a9-b2c9-4251-a94f-4ddd7472fea0 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "e56c9f4f-1398-4370-9b31-5ef91acc78f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.176s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.865038] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Volume attach. 
Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1227.865325] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290122', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'name': 'volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3c9eec15-9349-478f-9429-a33cf5cfae0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'serial': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1227.866481] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea2c0bc-1cfd-4eab-a7b7-e8029c7c9382 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.882893] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26468334-6d96-4a92-8404-bae400cc1edb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.906585] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee/volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1227.906585] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ab08063-cc59-4aa3-9556-8b4afeb60fae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.924380] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1227.924380] env[61905]: value = "task-1363056" [ 1227.924380] env[61905]: _type = "Task" [ 1227.924380] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.932934] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363056, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.166066] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.166310] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.434379] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363056, 'name': ReconfigVM_Task, 'duration_secs': 0.340853} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.434598] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfigured VM instance instance-0000006d to attach disk [datastore2] volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee/volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1228.439456] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399fc288-c531-42a0-93a1-40cef0d2caa7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.454214] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1228.454214] env[61905]: value = "task-1363057" [ 1228.454214] env[61905]: _type = "Task" [ 1228.454214] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.461710] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363057, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.669174] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1228.964725] env[61905]: DEBUG oslo_vmware.api [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363057, 'name': ReconfigVM_Task, 'duration_secs': 0.131898} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.965075] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290122', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'name': 'volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3c9eec15-9349-478f-9429-a33cf5cfae0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'serial': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1229.188282] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.188538] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.189917] env[61905]: INFO nova.compute.claims [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.000051] env[61905]: DEBUG nova.objects.instance [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 3c9eec15-9349-478f-9429-a33cf5cfae0e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.242859] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ccb41b-caa5-41cf-9971-18bf9a8ecf27 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.251637] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e081eaa-4681-4b77-8d1d-75fc6d327c04 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.280392] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-94b6c76c-562e-4c1e-b0a4-03abcebea993 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.287511] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11c1049-075b-47e5-8149-af5ca8469333 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.300658] env[61905]: DEBUG nova.compute.provider_tree [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.505980] env[61905]: DEBUG oslo_concurrency.lockutils [None req-d17127a1-9cef-4de8-9b78-ca52cb48c872 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.667459] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.667717] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.803900] env[61905]: DEBUG nova.scheduler.client.report [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1231.170799] env[61905]: INFO nova.compute.manager [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Detaching volume cae5633c-4a4e-4d4e-99ec-aaf8de34fdee [ 1231.199664] env[61905]: INFO nova.virt.block_device [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Attempting to driver detach volume 
cae5633c-4a4e-4d4e-99ec-aaf8de34fdee from mountpoint /dev/sdb [ 1231.199904] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Volume detach. Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1231.200105] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290122', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'name': 'volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3c9eec15-9349-478f-9429-a33cf5cfae0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'serial': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1231.201104] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bb2bea-2e80-4cab-ad34-99cbdfb6a496 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.222712] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7231e427-11ae-4a00-81d3-3f3c05c06967 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.229140] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880b759-7f49-403b-8b58-17428681d810 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.247904] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00a79bd-ee53-4676-afc6-4798ac7632c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.261392] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] The volume has not been displaced from its original location: [datastore2] volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee/volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee.vmdk. No consolidation needed. 
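Before detaching, the driver checks whether the vmdk still lives where Cinder created it; only a displaced disk (for example after a storage migration) would need to be copied back first. Here the paths match, hence "No consolidation needed." A sketch of that decision, with the path value taken from the log:

def needs_consolidation(current_backing_path, original_volume_path):
    # A mismatch means the disk was displaced and must be consolidated
    # (copied back) before it can be handed back to Cinder.
    return current_backing_path != original_volume_path

path = ('[datastore2] volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee/'
        'volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee.vmdk')
assert not needs_consolidation(path, path)  # the case logged above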
{{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1231.266408] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1231.266665] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50c0ccc0-6370-4489-8134-b337cdc0fbe6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.283640] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1231.283640] env[61905]: value = "task-1363058" [ 1231.283640] env[61905]: _type = "Task" [ 1231.283640] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.290656] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.308456] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.308953] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1231.793152] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363058, 'name': ReconfigVM_Task, 'duration_secs': 0.218836} completed successfully. 
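The inventory reported to placement above determines schedulable capacity as (total - reserved) * allocation_ratio per resource class, so this node can overcommit to 192 vCPUs while memory and disk stay at their physical sizes. Worked out from the logged figures:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0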
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.793439] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1231.798124] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-300ca504-849f-4eb8-ba3f-f8c64c58d952 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.814383] env[61905]: DEBUG nova.compute.utils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1231.815780] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1231.815780] env[61905]: value = "task-1363059" [ 1231.815780] env[61905]: _type = "Task" [ 1231.815780] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.816218] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1231.816389] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1231.825940] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363059, 'name': ReconfigVM_Task} progress is 10%. 
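"Using /dev/sd instead of None" records the fallback in get_next_device_name: the request supplied no device name, so Nova takes the default /dev/sd prefix and picks the first unused letter. A simplified sketch of that selection (not Nova's actual implementation, which also validates prefixes and handles trailing digits):

import string

def get_next_device_name(used_names, prefix='/dev/sd'):
    # Fall back to the '/dev/sd' prefix when the request passed None,
    # then return the first letter not already taken by another device.
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used_names:
            return candidate
    raise ValueError('no free device names under ' + prefix)

print(get_next_device_name({'/dev/sda'}))  # -> /dev/sdb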
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.871787] env[61905]: DEBUG nova.policy [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b126f47b9df4f4586f377f70faada62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edbab61ee8984d0c91eab473eba0047c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1232.126991] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Successfully created port: 5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1232.317610] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Start building block device mappings for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1232.328737] env[61905]: DEBUG oslo_vmware.api [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363059, 'name': ReconfigVM_Task, 'duration_secs': 0.137473} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.329034] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290122', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'name': 'volume-cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3c9eec15-9349-478f-9429-a33cf5cfae0e', 'attached_at': '', 'detached_at': '', 'volume_id': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee', 'serial': 'cae5633c-4a4e-4d4e-99ec-aaf8de34fdee'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1232.875861] env[61905]: DEBUG nova.objects.instance [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'flavor' on Instance uuid 3c9eec15-9349-478f-9429-a33cf5cfae0e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1233.329868] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Start spawning the instance on the hypervisor. 
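The "Policy check for network:attach_external_network failed" line is expected for a plain project member: the check runs non-fatally, and a False answer just means the port is created on the tenant network. A sketch of the same check with oslo.policy (the 'is_admin:True' default rule is assumed here for illustration):

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'is_admin:True'))  # assumed rule

creds = {'is_admin': False, 'roles': ['reader', 'member'],
         'project_id': 'edbab61ee8984d0c91eab473eba0047c'}
# do_raise defaults to False, so a denial is just a False return --
# matching the non-fatal "failed with credentials" line above.
print(enforcer.enforce('network:attach_external_network', {}, creds))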
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1233.354589] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T05:11:41Z,direct_url=,disk_format='vmdk',id=4d166298-c700-4bc6-8f8f-67684a277053,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f6c2fce65e3547a4b7223b2703324404',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T05:11:42Z,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1233.354841] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1233.355059] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1233.355261] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1233.355409] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1233.355554] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1233.355754] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1233.355912] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1233.356086] env[61905]: DEBUG 
nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1233.356249] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1233.356419] env[61905]: DEBUG nova.virt.hardware [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1233.357284] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4afbe28-d351-43d9-91e5-88b5cf42cff0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.364985] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe3d65a-8fdf-43cd-ad55-59d362b6be36 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.495564] env[61905]: DEBUG nova.compute.manager [req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Received event network-vif-plugged-5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1233.495800] env[61905]: DEBUG oslo_concurrency.lockutils [req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] Acquiring lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.496016] env[61905]: DEBUG oslo_concurrency.lockutils [req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.496194] env[61905]: DEBUG oslo_concurrency.lockutils [req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.496361] env[61905]: DEBUG nova.compute.manager [req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] No waiting events found dispatching network-vif-plugged-5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1233.496523] env[61905]: WARNING nova.compute.manager 
[req-696cc515-38c4-418d-83c0-b7b8cfdfd47d req-eaa2404e-cb9b-4f4a-acbc-e29b370c3116 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Received unexpected event network-vif-plugged-5121d3e3-74ae-4238-b6a6-5f789158dbeb for instance with vm_state building and task_state spawning. [ 1233.575430] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Successfully updated port: 5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.882506] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ae0ded6e-dfc4-487c-bc7d-0472e9470bf5 tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.215s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.080068] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.080348] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.080616] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.611905] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Instance cache missing network info. 
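The WARNING about an "unexpected event" is a benign race: Neutron delivered network-vif-plugged before the compute thread registered a waiter for it, so there was nothing to dispatch to. The handshake reduces to a per-event table of waiters, roughly:

import threading

waiters = {}  # event name -> threading.Event, per instance

def prepare_for_event(name):
    # Compute registers interest *before* triggering the VIF plug...
    waiters[name] = threading.Event()

def pop_instance_event(name):
    # ...and Neutron's notification lands here. With no registered
    # waiter (the race above), the event is logged as unexpected.
    ev = waiters.pop(name, None)
    if ev is None:
        print('Received unexpected event', name)
    else:
        ev.set()  # unblocks the compute thread waiting on it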
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1234.728969] env[61905]: DEBUG nova.network.neutron [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updating instance_info_cache with network_info: [{"id": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "address": "fa:16:3e:62:52:23", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5121d3e3-74", "ovs_interfaceid": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.910234] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.910442] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.910702] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.910895] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.911091] env[61905]: DEBUG 
oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.913203] env[61905]: INFO nova.compute.manager [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Terminating instance [ 1234.914891] env[61905]: DEBUG nova.compute.manager [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1234.915107] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.915927] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ee8f5a-dd3f-47a9-b1cb-1d63cc38e1e1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.923456] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.923684] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa98e9b4-2a2f-45ef-b33c-7f10abf48f94 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.929390] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1234.929390] env[61905]: value = "task-1363060" [ 1234.929390] env[61905]: _type = "Task" [ 1234.929390] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.936996] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363060, 'name': PowerOffVM_Task} progress is 0%. 
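Teardown of 3c9eec15 follows a fixed order that the next lines trace: power the VM off, unregister it from the vCenter inventory, then delete its directory from the datastore. A sketch of that sequence, taking the three vSphere calls as injected callables (hypothetical stand-ins, not the vmops API):

def destroy_instance(power_off, unregister, delete_datastore_dir):
    # Order matters: files can only be removed once the VM no longer
    # references them, and unregistering requires a powered-off VM.
    power_off()             # PowerOffVM_Task, awaited like any task
    unregister()            # UnregisterVM -- synchronous, no Task
    delete_datastore_dir()  # FileManager.DeleteDatastoreFile_Task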
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.231578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.231897] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Instance network_info: |[{"id": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "address": "fa:16:3e:62:52:23", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5121d3e3-74", "ovs_interfaceid": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1235.232346] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:52:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5121d3e3-74ae-4238-b6a6-5f789158dbeb', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.239607] env[61905]: DEBUG oslo.service.loopingcall [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
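The "Instance VIF info" structure above is what build_virtual_machine turns into a vmxnet3 NIC in the config spec handed to Folder.CreateVM_Task. Reproduced as data, with values copied from the log:

vif_info = [{
    'network_name': 'br-int',
    'mac_address': 'fa:16:3e:62:52:23',
    'network_ref': {'type': 'OpaqueNetwork',
                    'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95',
                    'network-type': 'nsx.LogicalSwitch',
                    'use-external-id': True},
    'iface_id': '5121d3e3-74ae-4238-b6a6-5f789158dbeb',
    'vif_model': 'vmxnet3',
}]
# Each entry becomes one virtual NIC device in the CreateVM_Task spec;
# the OpaqueNetwork ref binds it to the NSX logical switch by external id.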
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1235.239816] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1235.240053] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd12296c-9c45-4db8-82e5-d281fb318cbf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.260851] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.260851] env[61905]: value = "task-1363061" [ 1235.260851] env[61905]: _type = "Task" [ 1235.260851] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.268176] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363061, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.440749] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363060, 'name': PowerOffVM_Task, 'duration_secs': 0.172371} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.441010] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.441223] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.441483] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ddf223c-d2c9-4a62-ab17-9bcee91293b9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.503572] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.503844] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.504354] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleting the datastore file [datastore1] 
3c9eec15-9349-478f-9429-a33cf5cfae0e {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.504405] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e72c2409-cba4-4bf9-97f6-6e1fa382e991 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.511082] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for the task: (returnval){ [ 1235.511082] env[61905]: value = "task-1363063" [ 1235.511082] env[61905]: _type = "Task" [ 1235.511082] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.519498] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.522498] env[61905]: DEBUG nova.compute.manager [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Received event network-changed-5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1235.522714] env[61905]: DEBUG nova.compute.manager [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Refreshing instance network info cache due to event network-changed-5121d3e3-74ae-4238-b6a6-5f789158dbeb. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1235.522956] env[61905]: DEBUG oslo_concurrency.lockutils [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] Acquiring lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.523137] env[61905]: DEBUG oslo_concurrency.lockutils [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] Acquired lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.523292] env[61905]: DEBUG nova.network.neutron [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Refreshing network info cache for port 5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1235.770645] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363061, 'name': CreateVM_Task, 'duration_secs': 0.307434} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.770983] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1235.771465] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.771637] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.771952] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1235.772219] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5402ffe-15b4-4eb9-80c7-83808951900a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.776630] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1235.776630] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5226a03c-6e0b-45a8-a59f-5305065f6fc0" [ 1235.776630] env[61905]: _type = "Task" [ 1235.776630] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.784664] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5226a03c-6e0b-45a8-a59f-5305065f6fc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.021316] env[61905]: DEBUG oslo_vmware.api [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Task: {'id': task-1363063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136425} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.021522] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.021709] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.021891] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.022084] env[61905]: INFO nova.compute.manager [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1236.022341] env[61905]: DEBUG oslo.service.loopingcall [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1236.022532] env[61905]: DEBUG nova.compute.manager [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1236.022627] env[61905]: DEBUG nova.network.neutron [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.287383] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5226a03c-6e0b-45a8-a59f-5305065f6fc0, 'name': SearchDatastore_Task, 'duration_secs': 0.009884} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.287683] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.287917] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Processing image 4d166298-c700-4bc6-8f8f-67684a277053 {{(pid=61905) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.288166] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.288316] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.288492] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.288752] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b702c087-ebcb-478c-9969-83a4d5acfba0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.297103] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61905) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.297449] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Folder [datastore2] devstack-image-cache_base created. 
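The lock/SearchDatastore/MakeDirectory sequence above is the image-cache probe: under a per-image lock, the driver checks devstack-image-cache_base for an already-fetched vmdk and only downloads from Glance on a miss; either way the cached disk is then copied into the instance directory (the CopyVirtualDisk_Task that follows). Control flow, sketched with injected callables (hypothetical stand-ins):

def fetch_image_if_missing(cache_hit, download_to_cache, copy_to_instance):
    # SearchDatastore_Task found the cached vmdk here, so the download
    # step is skipped and only the copy into
    # 821aa0cb-1947-46fe-bc0a-4900baa8cf82/ runs.
    if not cache_hit():
        download_to_cache()
    copy_to_instance()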
{{(pid=61905) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.298160] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92340a74-6c74-4a44-b789-7994136daafd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.303449] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1236.303449] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bb8e-410e-eb71-dbf7-feddc280c916" [ 1236.303449] env[61905]: _type = "Task" [ 1236.303449] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.313359] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bb8e-410e-eb71-dbf7-feddc280c916, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.366011] env[61905]: DEBUG nova.network.neutron [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updated VIF entry in instance network info cache for port 5121d3e3-74ae-4238-b6a6-5f789158dbeb. {{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1236.366400] env[61905]: DEBUG nova.network.neutron [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updating instance_info_cache with network_info: [{"id": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "address": "fa:16:3e:62:52:23", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5121d3e3-74", "ovs_interfaceid": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.815861] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5283bb8e-410e-eb71-dbf7-feddc280c916, 
'name': SearchDatastore_Task, 'duration_secs': 0.009656} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.816655] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d187fa7-3af7-4ed3-acc0-1d18bc0e88a9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.822141] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1236.822141] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5222256e-4c32-d2ce-077e-aaacb7240d2c" [ 1236.822141] env[61905]: _type = "Task" [ 1236.822141] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.831022] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5222256e-4c32-d2ce-077e-aaacb7240d2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.869256] env[61905]: DEBUG oslo_concurrency.lockutils [req-76ab22b1-31f4-446a-82ab-487910ff06bd req-c879882d-1edc-45d5-b754-ca87a0f8ca6c service nova] Releasing lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.076024] env[61905]: DEBUG nova.network.neutron [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.333259] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]5222256e-4c32-d2ce-077e-aaacb7240d2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009212} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.333804] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.333853] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 821aa0cb-1947-46fe-bc0a-4900baa8cf82/821aa0cb-1947-46fe-bc0a-4900baa8cf82.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1237.334130] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ae52f20-ce06-4493-8096-b16304f328f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.340981] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1237.340981] env[61905]: value = "task-1363064" [ 1237.340981] env[61905]: _type = "Task" [ 1237.340981] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.349677] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.553580] env[61905]: DEBUG nova.compute.manager [req-31907d33-9934-4272-aee4-d3b9a0efdb20 req-67391d8b-1533-46b8-bb94-54a3e327b730 service nova] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Received event network-vif-deleted-59ef522b-b64f-4feb-8ab1-1be5cde2ebb5 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1237.578627] env[61905]: INFO nova.compute.manager [-] [instance: 3c9eec15-9349-478f-9429-a33cf5cfae0e] Took 1.56 seconds to deallocate network for instance. [ 1237.850933] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500912} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.851308] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/4d166298-c700-4bc6-8f8f-67684a277053/4d166298-c700-4bc6-8f8f-67684a277053.vmdk to [datastore2] 821aa0cb-1947-46fe-bc0a-4900baa8cf82/821aa0cb-1947-46fe-bc0a-4900baa8cf82.vmdk {{(pid=61905) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1237.851384] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Extending root virtual disk to 1048576 {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1237.851619] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1311dec-523d-4eb9-b169-00b6ccded2b3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.857486] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1237.857486] env[61905]: value = "task-1363065" [ 1237.857486] env[61905]: _type = "Task" [ 1237.857486] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.865904] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363065, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.086566] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.086849] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.087091] env[61905]: DEBUG nova.objects.instance [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lazy-loading 'resources' on Instance uuid 3c9eec15-9349-478f-9429-a33cf5cfae0e {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1238.366598] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363065, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069122} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.366865] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Extended root virtual disk {{(pid=61905) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1238.367671] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e92e54-8be3-4030-b7b0-71fd20a1c7c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.391532] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 821aa0cb-1947-46fe-bc0a-4900baa8cf82/821aa0cb-1947-46fe-bc0a-4900baa8cf82.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.391847] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df2ee5ba-440d-4f07-8948-180154b86281 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.413194] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1238.413194] env[61905]: value = "task-1363066" [ 1238.413194] env[61905]: _type = "Task" [ 1238.413194] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.420808] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363066, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.640586] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fbceee-f3ee-410e-a749-16b089a77bed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.647865] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61086ba-a95a-4f05-bc24-6646ef0394ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.679738] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c199f31f-0c8d-4da3-a7ba-f03ab8b805d3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.687187] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a41800-f685-480f-9218-9ce1db4db8f0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.699577] env[61905]: DEBUG nova.compute.provider_tree [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.923205] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363066, 'name': ReconfigVM_Task, 'duration_secs': 0.273723} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.923566] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 821aa0cb-1947-46fe-bc0a-4900baa8cf82/821aa0cb-1947-46fe-bc0a-4900baa8cf82.vmdk or device None with type sparse {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.924199] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f11dfb6-a103-4282-87b2-131e588ce104 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.930602] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1238.930602] env[61905]: value = "task-1363067" [ 1238.930602] env[61905]: _type = "Task" [ 1238.930602] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.938560] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363067, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.202561] env[61905]: DEBUG nova.scheduler.client.report [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1239.440575] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363067, 'name': Rename_Task, 'duration_secs': 0.135441} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.440892] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.441153] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b41713f0-1f51-4c88-bab0-01d71e54ca5a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.447694] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1239.447694] env[61905]: value = "task-1363068" [ 1239.447694] env[61905]: _type = "Task" [ 1239.447694] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.454848] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363068, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.707090] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.725471] env[61905]: INFO nova.scheduler.client.report [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Deleted allocations for instance 3c9eec15-9349-478f-9429-a33cf5cfae0e [ 1239.957905] env[61905]: DEBUG oslo_vmware.api [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363068, 'name': PowerOnVM_Task, 'duration_secs': 0.472263} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.958258] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1239.958371] env[61905]: INFO nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Took 6.63 seconds to spawn the instance on the hypervisor. [ 1239.958551] env[61905]: DEBUG nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1239.959313] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8996a4e2-bdbb-44fd-8683-34607a6f797d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.232907] env[61905]: DEBUG oslo_concurrency.lockutils [None req-166a77db-21a9-4c97-aac1-68cb2e42067a tempest-AttachVolumeNegativeTest-843957365 tempest-AttachVolumeNegativeTest-843957365-project-member] Lock "3c9eec15-9349-478f-9429-a33cf5cfae0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.322s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.478603] env[61905]: INFO nova.compute.manager [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Took 11.30 seconds to build instance. 
[ 1240.980527] env[61905]: DEBUG oslo_concurrency.lockutils [None req-0f5c0e90-85fd-4f1e-96ca-5fe558a1f785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.814s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.038499] env[61905]: DEBUG nova.compute.manager [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Received event network-changed-5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1241.038499] env[61905]: DEBUG nova.compute.manager [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Refreshing instance network info cache due to event network-changed-5121d3e3-74ae-4238-b6a6-5f789158dbeb. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1241.038652] env[61905]: DEBUG oslo_concurrency.lockutils [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] Acquiring lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.038802] env[61905]: DEBUG oslo_concurrency.lockutils [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] Acquired lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.038961] env[61905]: DEBUG nova.network.neutron [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Refreshing network info cache for port 5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.739169] env[61905]: DEBUG nova.network.neutron [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updated VIF entry in instance network info cache for port 5121d3e3-74ae-4238-b6a6-5f789158dbeb. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1241.739612] env[61905]: DEBUG nova.network.neutron [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updating instance_info_cache with network_info: [{"id": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "address": "fa:16:3e:62:52:23", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5121d3e3-74", "ovs_interfaceid": "5121d3e3-74ae-4238-b6a6-5f789158dbeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.242248] env[61905]: DEBUG oslo_concurrency.lockutils [req-f4914bd7-4987-4140-abe5-5da7d4b39119 req-e95740bf-8185-453c-af4a-8ef00be5cd96 service nova] Releasing lock "refresh_cache-821aa0cb-1947-46fe-bc0a-4900baa8cf82" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.104411] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.104700] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.104871] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.104974] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1248.405233] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1249.399894] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.404856] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1250.405289] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.405197] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1251.405641] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1252.411800] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. 
{{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1254.404295] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1254.907055] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.907213] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.907382] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.907534] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1254.908438] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec49cc7c-466a-45aa-99ab-cac43b99585d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.917300] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28394213-91bb-4f7e-8a76-ff3d6fd8862b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.933524] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03358db-9df2-435f-a3c5-638eeb653a71 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.941243] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e17818-3f85-498e-9055-89473ff3677e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.970990] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180954MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1254.971160] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1254.971342] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.996856] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.997182] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 821aa0cb-1947-46fe-bc0a-4900baa8cf82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.997324] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1255.997490] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1256.033936] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5698f43b-2fad-4a3e-914f-737191c7b7a8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.041903] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749cbeec-1ca9-44ab-af68-86ce1ea13ce6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.070688] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3db248-29f1-4a4e-b797-8919046a9838 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.077994] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e8d767-9fe8-43ba-93c9-2754f069b039 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.091714] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.594944] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1257.099777] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1257.100222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.129s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.347989] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "cf487ed2-ae22-4867-8987-86480ac8e07a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.348330] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.850821] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Starting instance... 
{{(pid=61905) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1261.373268] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.373569] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.375065] env[61905]: INFO nova.compute.claims [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1262.434051] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd350e9-5370-4a71-8900-92a6abef5354 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.442516] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0619f718-e121-43ad-8bd4-d5cf5f8d81ba {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.473570] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d5cb5c-9384-47b1-ac6c-17e961b32ca0 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.481396] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b462172-5e3e-40f6-8255-37fbb13c5f9e {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.495022] env[61905]: DEBUG nova.compute.provider_tree [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1262.998729] env[61905]: DEBUG nova.scheduler.client.report [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1263.503752] env[61905]: DEBUG oslo_concurrency.lockutils 
[None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.130s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.504202] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Start building networks asynchronously for instance. {{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1264.008931] env[61905]: DEBUG nova.compute.utils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Using /dev/sd instead of None {{(pid=61905) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1264.010356] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Allocating IP information in the background. {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1264.010525] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] allocate_for_instance() {{(pid=61905) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1264.058158] env[61905]: DEBUG nova.policy [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b126f47b9df4f4586f377f70faada62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edbab61ee8984d0c91eab473eba0047c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61905) authorize /opt/stack/nova/nova/policy.py:201}} [ 1264.313012] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Successfully created port: 3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1264.514102] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Start building block device mappings for instance. 
{{(pid=61905) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1265.018968] env[61905]: INFO nova.virt.block_device [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Booting with volume bed84cb4-b376-46a7-9c2f-9699deeee48e at /dev/sda [ 1265.053746] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2551789c-f052-4d1e-8d39-97982ce375a4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.063810] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c4b9cd-39a1-4433-941c-8122031d23c8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.089710] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8323344-4bf5-43a6-b77b-50e20e9db946 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.098918] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188fc2b4-96da-4f7e-b72c-c0357305c64a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.127374] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c518e69-4697-4b69-95da-f3da32615588 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.133861] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-919b8721-0651-4645-8757-d47111c87562 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.147467] env[61905]: DEBUG nova.virt.block_device [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating existing volume attachment record: fc328629-f635-4275-b29c-6effe85c6399 {{(pid=61905) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1265.674481] env[61905]: DEBUG nova.compute.manager [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Received event network-vif-plugged-3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1265.674751] env[61905]: DEBUG oslo_concurrency.lockutils [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] Acquiring lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.675041] env[61905]: DEBUG oslo_concurrency.lockutils [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61905) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.675123] env[61905]: DEBUG oslo_concurrency.lockutils [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.675246] env[61905]: DEBUG nova.compute.manager [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] No waiting events found dispatching network-vif-plugged-3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1265.675440] env[61905]: WARNING nova.compute.manager [req-851a741c-d925-4776-a579-f58d57c145d0 req-465f0d36-9b0b-4f65-900b-66b25995030c service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Received unexpected event network-vif-plugged-3c103fdf-7451-4fc9-9e07-99c09eaf4765 for instance with vm_state building and task_state block_device_mapping. [ 1265.752881] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Successfully updated port: 3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1266.256052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.256052] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.256052] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1266.786202] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Instance cache missing network info. 
{{(pid=61905) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1266.904732] env[61905]: DEBUG nova.network.neutron [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.223349] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Start spawning the instance on the hypervisor. 
{{(pid=61905) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1267.224014] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1267.224275] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1267.224474] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.224705] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1267.224895] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.225075] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1267.225330] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1267.225520] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1267.225728] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies 
{{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1267.225931] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1267.226161] env[61905]: DEBUG nova.virt.hardware [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1267.227551] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ddd998-a2b1-498f-b390-e528b43cfd40 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.236510] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4caf8d2-7ae2-4a23-9dc5-747df138c0f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.407705] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.407995] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Instance network_info: |[{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61905) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1267.408462] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:ae:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ebf80ce9-9885-46ed-ac23-310a98789a95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c103fdf-7451-4fc9-9e07-99c09eaf4765', 'vif_model': 'vmxnet3'}] {{(pid=61905) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1267.415849] env[61905]: DEBUG oslo.service.loopingcall [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1267.416075] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Creating VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1267.416308] env[61905]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a956c57e-b6d8-4aba-90e5-eac7d5b616cd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.436874] env[61905]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1267.436874] env[61905]: value = "task-1363080" [ 1267.436874] env[61905]: _type = "Task" [ 1267.436874] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.444620] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363080, 'name': CreateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.701133] env[61905]: DEBUG nova.compute.manager [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Received event network-changed-3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1267.701335] env[61905]: DEBUG nova.compute.manager [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Refreshing instance network info cache due to event network-changed-3c103fdf-7451-4fc9-9e07-99c09eaf4765. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1267.701557] env[61905]: DEBUG oslo_concurrency.lockutils [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.701702] env[61905]: DEBUG oslo_concurrency.lockutils [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.701864] env[61905]: DEBUG nova.network.neutron [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Refreshing network info cache for port 3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1267.947416] env[61905]: DEBUG oslo_vmware.api [-] Task: {'id': task-1363080, 'name': CreateVM_Task, 'duration_secs': 0.2983} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.947802] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Created VM on the ESX host {{(pid=61905) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1267.948211] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'attachment_id': 'fc328629-f635-4275-b29c-6effe85c6399', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290125', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'name': 'volume-bed84cb4-b376-46a7-9c2f-9699deeee48e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf487ed2-ae22-4867-8987-86480ac8e07a', 'attached_at': '', 'detached_at': '', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'serial': 'bed84cb4-b376-46a7-9c2f-9699deeee48e'}, 'device_type': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=61905) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1267.948425] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Root volume attach. 
Driver type: vmdk {{(pid=61905) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1267.949181] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73941fdf-ef43-4a95-a984-c4002cadd043 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.956483] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62fcaf9-4007-427c-a2f5-60aa15efc4f7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.962763] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1b5964-1eec-45b8-acc2-7b447ef839a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.968645] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e03bb7ac-c3fc-4e2a-98b6-70fc3ef7ca18 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.976219] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1267.976219] env[61905]: value = "task-1363081" [ 1267.976219] env[61905]: _type = "Task" [ 1267.976219] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.983390] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.444605] env[61905]: DEBUG nova.network.neutron [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updated VIF entry in instance network info cache for port 3c103fdf-7451-4fc9-9e07-99c09eaf4765. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1268.444975] env[61905]: DEBUG nova.network.neutron [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1268.487172] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 43%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.948156] env[61905]: DEBUG oslo_concurrency.lockutils [req-20194e96-33a8-452f-b4fd-9b012fd96487 req-44901f48-3a1f-4bf2-9661-6e6931cbf0f9 service nova] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1268.988773] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 58%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.489996] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 73%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.990914] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 88%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.491250] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task} progress is 97%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.991752] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363081, 'name': RelocateVM_Task, 'duration_secs': 2.980314} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.992167] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Volume attach. Driver type: vmdk {{(pid=61905) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1270.992304] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290125', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'name': 'volume-bed84cb4-b376-46a7-9c2f-9699deeee48e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf487ed2-ae22-4867-8987-86480ac8e07a', 'attached_at': '', 'detached_at': '', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'serial': 'bed84cb4-b376-46a7-9c2f-9699deeee48e'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1270.993110] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05745bdc-96ea-4826-96d2-77fc474f892d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.008219] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8de741-0662-47e2-ba17-331d09f401b7 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.029985] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-bed84cb4-b376-46a7-9c2f-9699deeee48e/volume-bed84cb4-b376-46a7-9c2f-9699deeee48e.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1271.030245] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a53bce98-2725-43f3-a3d4-bc4fcb48397a {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.049960] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1271.049960] env[61905]: value = "task-1363082" [ 1271.049960] env[61905]: _type = "Task" [ 1271.049960] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.057808] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.561013] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363082, 'name': ReconfigVM_Task, 'duration_secs': 0.297826} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.561354] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-bed84cb4-b376-46a7-9c2f-9699deeee48e/volume-bed84cb4-b376-46a7-9c2f-9699deeee48e.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.566047] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-305f1f57-b407-48f2-b759-2810738e6efa {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.580865] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1271.580865] env[61905]: value = "task-1363083" [ 1271.580865] env[61905]: _type = "Task" [ 1271.580865] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.588613] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363083, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.091033] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363083, 'name': ReconfigVM_Task, 'duration_secs': 0.132863} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.091463] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290125', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'name': 'volume-bed84cb4-b376-46a7-9c2f-9699deeee48e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cf487ed2-ae22-4867-8987-86480ac8e07a', 'attached_at': '', 'detached_at': '', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'serial': 'bed84cb4-b376-46a7-9c2f-9699deeee48e'} {{(pid=61905) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1272.092021] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d42e71c-c48c-48c2-8290-d986cff0a730 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.098966] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1272.098966] env[61905]: value = "task-1363084" [ 1272.098966] env[61905]: _type = "Task" [ 1272.098966] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.106379] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363084, 'name': Rename_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.610027] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363084, 'name': Rename_Task, 'duration_secs': 0.132921} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.610463] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.610722] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5330fb07-477c-440c-927d-dc8b494c3ce3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.618066] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1272.618066] env[61905]: value = "task-1363085" [ 1272.618066] env[61905]: _type = "Task" [ 1272.618066] env[61905]: } to complete. 
{{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.627427] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363085, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.128936] env[61905]: DEBUG oslo_vmware.api [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363085, 'name': PowerOnVM_Task, 'duration_secs': 0.418183} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.129325] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.129438] env[61905]: INFO nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Took 5.91 seconds to spawn the instance on the hypervisor. [ 1273.129629] env[61905]: DEBUG nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Checking state {{(pid=61905) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1273.130425] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3009cb5-913f-4d42-af85-c6a8a5718455 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.647864] env[61905]: INFO nova.compute.manager [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Took 12.29 seconds to build instance. 
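[editor's note] The spawn sequence above (CreateVM_Task, RelocateVM_Task, ReconfigVM_Task x2, Rename_Task, PowerOnVM_Task) is driven by the wait_for_task/_poll_task pair that emits the recurring "Task: {...} progress is N%." lines until each task completes. As a rough, hedged sketch of that polling pattern only — not the actual oslo.vmware implementation; poll_fn, interval, and timeout are illustrative names — the loop behaves roughly like this:

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll poll_fn() -> {'state': ..., 'progress': ...} until terminal.

    Mirrors the log pattern above: every poll logs a progress line;
    'success' returns the task info, 'error' raises, anything else
    sleeps and retries until the timeout expires.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        print("Task progress is %s%%." % info.get("progress", 0))
        if info.get("state") == "success":
            return info
        if info.get("state") == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)

Against a task whose progress climbs 0% -> 43% -> 58% -> ... -> success, this reproduces the cadence of the RelocateVM_Task polling seen above (one progress line roughly per poll interval, then a final "completed successfully" path when the state turns terminal).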
[ 1274.150489] env[61905]: DEBUG oslo_concurrency.lockutils [None req-4baeab28-79f1-456d-8556-999540e77d5f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.802s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.648278] env[61905]: DEBUG nova.compute.manager [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1274.648479] env[61905]: DEBUG nova.compute.manager [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing instance network info cache due to event network-changed-a105c9d5-8ba9-40c5-ba4c-a35528f5779b. {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1274.648688] env[61905]: DEBUG oslo_concurrency.lockutils [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.648837] env[61905]: DEBUG oslo_concurrency.lockutils [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.648996] env[61905]: DEBUG nova.network.neutron [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Refreshing network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1275.374201] env[61905]: DEBUG nova.network.neutron [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updated VIF entry in instance network info cache for port a105c9d5-8ba9-40c5-ba4c-a35528f5779b. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1275.374588] env[61905]: DEBUG nova.network.neutron [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.877376] env[61905]: DEBUG oslo_concurrency.lockutils [req-8ad92339-0447-4010-8101-e480c67fae6e req-739bcd5d-0cad-4297-a032-e675bd5e5b4a service nova] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.177099] env[61905]: DEBUG nova.compute.manager [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Stashing vm_state: active {{(pid=61905) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1276.674308] env[61905]: DEBUG nova.compute.manager [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Received event network-changed-3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1276.674308] env[61905]: DEBUG nova.compute.manager [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Refreshing instance network info cache due to event network-changed-3c103fdf-7451-4fc9-9e07-99c09eaf4765. 
{{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1276.674308] env[61905]: DEBUG oslo_concurrency.lockutils [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.674668] env[61905]: DEBUG oslo_concurrency.lockutils [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.674668] env[61905]: DEBUG nova.network.neutron [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Refreshing network info cache for port 3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1276.694872] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.695144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.199865] env[61905]: INFO nova.compute.claims [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1277.378759] env[61905]: DEBUG nova.network.neutron [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updated VIF entry in instance network info cache for port 3c103fdf-7451-4fc9-9e07-99c09eaf4765. 
{{(pid=61905) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1277.379150] env[61905]: DEBUG nova.network.neutron [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.705915] env[61905]: INFO nova.compute.resource_tracker [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating resource usage from migration 38f73554-0c9c-40aa-9e44-aaec52d33c30 [ 1277.765875] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109761d8-4389-429c-98ff-7ed43872535f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.775114] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8742b9b-0a00-4bbc-badc-de06c1919e97 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.805492] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a504c6a-8508-4014-82ef-41a2c2f1c266 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.813610] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24f4a53-a98e-481b-ad0f-24335a88d320 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.826956] env[61905]: DEBUG nova.compute.provider_tree [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.881692] env[61905]: DEBUG 
oslo_concurrency.lockutils [req-e63e544d-770b-43a4-87f1-c5dffb685dbe req-660e7bbf-61ee-40e4-9f60-bdcdad230f6f service nova] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.330853] env[61905]: DEBUG nova.scheduler.client.report [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1278.836115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.141s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.836505] env[61905]: INFO nova.compute.manager [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Migrating [ 1279.350833] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.351068] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.351296] env[61905]: DEBUG nova.network.neutron [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.048235] env[61905]: DEBUG nova.network.neutron [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.550684] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.068453] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52c3840-6453-4765-b74f-ad3370442cdf {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.088825] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 0 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1282.595164] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1282.595496] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7d22b35-a0e5-43aa-b770-cfcd650fd425 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.603622] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1282.603622] env[61905]: value = "task-1363086" [ 1282.603622] env[61905]: _type = "Task" [ 1282.603622] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.613042] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363086, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.114172] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363086, 'name': PowerOffVM_Task, 'duration_secs': 0.195774} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.114509] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1283.114635] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 17 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1283.621876] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:12:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1283.622170] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1283.622316] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1283.622500] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1283.622647] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1283.622793] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 
tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1283.622994] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1283.623216] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1283.623391] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1283.623561] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1283.623735] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1283.628888] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff8db27a-1799-449c-9406-54f9a768ac32 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.650633] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1283.650633] env[61905]: value = "task-1363087" [ 1283.650633] env[61905]: _type = "Task" [ 1283.650633] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.659690] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.160237] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363087, 'name': ReconfigVM_Task, 'duration_secs': 0.140899} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.160599] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 33 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1284.667019] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T05:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1284.667335] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1284.667441] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image limits 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.667627] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Flavor pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1284.667777] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Image pref 0:0:0 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.667926] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61905) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1284.668141] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1284.668308] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1284.668479] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Got 1 possible topologies {{(pid=61905) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1284.668642] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1284.668816] env[61905]: DEBUG nova.virt.hardware [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61905) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1284.674089] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1284.674401] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f58e2e16-4a61-4b57-8fbe-caef1c37c8fc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.694376] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1284.694376] env[61905]: value = "task-1363088" [ 1284.694376] env[61905]: _type = "Task" [ 1284.694376] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.701796] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363088, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.203545] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363088, 'name': ReconfigVM_Task, 'duration_secs': 0.146365} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.203840] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1285.204942] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e722a8d6-c9c7-404e-9fa3-c33944a3e4f1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.225368] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-bed84cb4-b376-46a7-9c2f-9699deeee48e/volume-bed84cb4-b376-46a7-9c2f-9699deeee48e.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.225932] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3299e8d-3f65-4513-8b4a-e1e9c0331175 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.243427] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1285.243427] env[61905]: value = "task-1363089" [ 1285.243427] env[61905]: _type = "Task" [ 1285.243427] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.251819] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363089, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.753378] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363089, 'name': ReconfigVM_Task, 'duration_secs': 0.243875} completed successfully. 
[ 1285.753661] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-bed84cb4-b376-46a7-9c2f-9699deeee48e/volume-bed84cb4-b376-46a7-9c2f-9699deeee48e.vmdk or device None with type thin {{(pid=61905) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}}
[ 1285.753911] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 50 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 1286.260460] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2245e4a9-d83e-4d52-ab9d-a1246c287852 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1286.278673] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecfff9d-7a3e-4ab4-8b08-6c64e08f38ae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1286.295381] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 67 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 1287.945298] env[61905]: DEBUG nova.network.neutron [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Port 3c103fdf-7451-4fc9-9e07-99c09eaf4765 binding to destination host cpu-1 is already ACTIVE {{(pid=61905) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}}
[ 1288.965433] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1288.965860] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1288.965860] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1290.000320] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1290.000677] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1290.000900] env[61905]: DEBUG nova.network.neutron [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1290.707632] env[61905]: DEBUG nova.network.neutron [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1291.210672] env[61905]: DEBUG oslo_concurrency.lockutils [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1291.719960] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b859a4b-a58a-43a3-8d34-0d9a1ddb4f55 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
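[editor's note] The "Updating instance_info_cache" entry above carries the full serialized VIF list for the instance. Pulling the useful fields out of that structure is a simple nested walk; a sketch (the helper name is mine, and the dict is the log entry's payload trimmed to the relevant keys):

    network_info = [{
        "id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765",
        "address": "fa:16:3e:4c:ae:81",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.6",
                     "floating_ips": [{"address": "10.180.180.219"}]}],
        }]},
    }]

    def addresses(network_info):
        # Walk vif -> network -> subnet -> ip, pairing each fixed IP with
        # the floating IPs NATed onto it.
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    yield ip["address"], [f["address"] for f in ip["floating_ips"]]

    print(list(addresses(network_info)))
    # [('192.168.128.6', ['10.180.180.219'])]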
[ 1291.726818] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d183e54e-a3b8-4110-ae12-49c99f3d2704 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1292.820679] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b631e8e-a86b-40d3-b5ec-60a508797fae {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1292.839634] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5016811c-64d0-44a1-8fe6-7b5e1b9c1beb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1292.846078] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 83 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 1293.351901] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powering on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}}
[ 1293.352236] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9f0101a-ecab-4fd2-956d-3bd7c2d6ef42 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1293.359759] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1293.359759] env[61905]: value = "task-1363090"
[ 1293.359759] env[61905]: _type = "Task"
[ 1293.359759] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1293.367415] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363090, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1293.870248] env[61905]: DEBUG oslo_vmware.api [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363090, 'name': PowerOnVM_Task, 'duration_secs': 0.35662} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1293.870631] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powered on the VM {{(pid=61905) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}}
[ 1293.870767] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-41b57449-1a14-4118-bb8a-750e8372cc51 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance 'cf487ed2-ae22-4867-8987-86480ac8e07a' progress to 100 {{(pid=61905) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}}
[ 1296.492454] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "cf487ed2-ae22-4867-8987-86480ac8e07a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1296.492821] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1296.492883] env[61905]: DEBUG nova.compute.manager [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Going to confirm migration 4 {{(pid=61905) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}}
[ 1297.066634] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1297.066864] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquired lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1297.067098] env[61905]: DEBUG nova.network.neutron [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Building network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}}
[ 1297.067331] env[61905]: DEBUG nova.objects.instance [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'info_cache' on Instance uuid cf487ed2-ae22-4867-8987-86480ac8e07a {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
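[editor's note] "Lazy-loading" here means the Instance object was fetched without that field and pulls it from the database on first attribute access, then caches it. A rough stand-alone sketch of the pattern (not Nova's actual object code; the _load stub stands in for a DB query):

    class Instance:
        # Mimics the obj_load_attr pattern: fields not yet pulled from the DB
        # are fetched on first attribute access, then cached on the object.
        def __init__(self, uuid):
            self.uuid = uuid

        def _load(self, name):
            print(f"Lazy-loading '{name}' on Instance uuid {self.uuid}")
            return {}  # a real implementation would query the database here

        def __getattr__(self, name):
            # Only called when normal lookup fails, i.e. the field is missing.
            value = self._load(name)
            setattr(self, name, value)
            return value

    inst = Instance("cf487ed2-ae22-4867-8987-86480ac8e07a")
    inst.info_cache          # triggers a lazy load
    inst.migration_context   # and again for the next missing field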
[ 1298.336017] env[61905]: DEBUG nova.network.neutron [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [{"id": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "address": "fa:16:3e:4c:ae:81", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c103fdf-74", "ovs_interfaceid": "3c103fdf-7451-4fc9-9e07-99c09eaf4765", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1298.838923] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Releasing lock "refresh_cache-cf487ed2-ae22-4867-8987-86480ac8e07a" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1298.839236] env[61905]: DEBUG nova.objects.instance [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'migration_context' on Instance uuid cf487ed2-ae22-4867-8987-86480ac8e07a {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1299.341900] env[61905]: DEBUG nova.objects.base [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61905) wrapper /opt/stack/nova/nova/objects/base.py:126}}
[ 1299.343299] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd724f81-7941-4b40-87fc-1e06098b6bc2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1299.363421] env[61905]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-909b2155-b2fa-455c-af0d-85641bc11777 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1299.368602] env[61905]: DEBUG oslo_vmware.api [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1299.368602] env[61905]: value = "session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f51a79-5da9-5bfd-8833-c1a8527e55c1"
[ 1299.368602] env[61905]: _type = "Task"
[ 1299.368602] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1299.376247] env[61905]: DEBUG oslo_vmware.api [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f51a79-5da9-5bfd-8833-c1a8527e55c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1299.878503] env[61905]: DEBUG oslo_vmware.api [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': session[5285e1db-81ef-4bce-e0c1-df9bdfbe9ccd]52f51a79-5da9-5bfd-8833-c1a8527e55c1, 'name': SearchDatastore_Task, 'duration_secs': 0.008281} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1299.878992] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1299.879144] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1300.442560] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3027cfb8-b052-403a-bc38-accd746225fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1300.450269] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8e9217-4359-4076-929d-497cc915bdcc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1300.478337] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1c71bf-701b-49bf-a7e3-b4ce65c456de {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1300.485268] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6db3f9-34cb-437e-bc64-2cb1509dddd2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1300.497597] env[61905]: DEBUG nova.compute.provider_tree [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1301.000559] env[61905]: DEBUG nova.scheduler.client.report [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1302.010994] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.132s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1302.568936] env[61905]: INFO nova.scheduler.client.report [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocation for migration 38f73554-0c9c-40aa-9e44-aaec52d33c30
[ 1302.908018] env[61905]: INFO nova.compute.manager [None req-b6f849ae-5960-4c93-833d-0b506d5eb785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Get console output
[ 1302.908365] env[61905]: WARNING nova.virt.vmwareapi.driver [None req-b6f849ae-5960-4c93-833d-0b506d5eb785 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] The console log is missing. Check your VSPC configuration
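[editor's note] The inventory dict reported above is what placement uses to size the provider: usable capacity for a resource class is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request. For the figures in the log that gives 192 schedulable VCPUs and 196078 MB of RAM. A quick check of that arithmetic (the helper name is mine; the formula is the standard placement capacity model):

    def effective_capacity(inv):
        # Placement sizes a resource class as (total - reserved) * allocation_ratio.
        return int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 149},
    }
    for rc, inv in inventory.items():
        print(rc, effective_capacity(inv), "max per allocation:", inv["max_unit"])
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400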
[ 1303.074832] env[61905]: DEBUG oslo_concurrency.lockutils [None req-3a124a0d-bdf8-4d22-8a94-b16ffe82a76f tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.582s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1309.101329] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.101770] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.101770] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.101894] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1309.101992] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}}
[ 1311.401227] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1311.403789] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1311.403936] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}}
[ 1311.404072] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Rebuilding the list of instances to heal {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1311.933567] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1311.933765] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquired lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1311.933881] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Forcefully refreshing network info cache for instance {{(pid=61905) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}}
[ 1311.934060] env[61905]: DEBUG nova.objects.instance [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lazy-loading 'info_cache' on Instance uuid bcca8c7b-3e80-4895-ac56-d5aa05d482e5 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1313.623408] env[61905]: DEBUG nova.network.neutron [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [{"id": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "address": "fa:16:3e:e0:49:6b", "network": {"id": "cb6f9cd4-5962-4e07-bf35-b21ec4506540", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2042579240-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "edbab61ee8984d0c91eab473eba0047c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ebf80ce9-9885-46ed-ac23-310a98789a95", "external-id": "nsx-vlan-transportzone-582", "segmentation_id": 582, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa105c9d5-8b", "ovs_interfaceid": "a105c9d5-8ba9-40c5-ba4c-a35528f5779b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
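[editor's note] The periodic-task entries above are the compute manager's background loop: decorated methods are registered on the manager and run_periodic_tasks dispatches whichever are due on each tick. A stripped-down sketch of that dispatch model (stdlib only; the real runner also supports per-task intervals, spacing, and jitter, none of which is modeled here):

    import time

    class PeriodicRunner:
        # Registered callables run whenever their interval has elapsed, like
        # run_periodic_tasks cycling through ComputeManager._poll_* methods.
        def __init__(self):
            self.tasks = []   # each item: [name, interval, fn, last_run]

        def register(self, name, interval, fn):
            self.tasks.append([name, interval, fn, 0.0])

        def run_periodic_tasks(self):
            now = time.monotonic()
            for task in self.tasks:
                name, interval, fn, last_run = task
                if now - last_run >= interval:
                    print(f"Running periodic task ComputeManager.{name}")
                    fn()
                    task[3] = now

    runner = PeriodicRunner()
    runner.register("_poll_rebooting_instances", 60, lambda: None)
    runner.register("_heal_instance_info_cache", 60, lambda: None)
    runner.run_periodic_tasks()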
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.126166] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Releasing lock "refresh_cache-bcca8c7b-3e80-4895-ac56-d5aa05d482e5" {{(pid=61905) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.126396] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updated the network info_cache for instance {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1314.126613] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.126764] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.404235] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.908266] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.410891] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.411130] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.411257] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.411408] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61905) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1315.412329] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4333fddd-c661-4398-8ee5-cb32617bdfa5 
{{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.420282] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd4d5b3-3e55-4624-ac77-b13d5546f323 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.433459] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7c61df-95f4-47e9-a15d-25f28a2eaabd {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.439723] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd04bb04-afa7-47d3-aa5c-4cb86a1c3720 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.466896] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181035MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61905) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1315.467044] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.467222] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.491012] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.491282] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance 821aa0cb-1947-46fe-bc0a-4900baa8cf82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.491341] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Instance cf487ed2-ae22-4867-8987-86480ac8e07a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61905) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.491473] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1316.491634] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61905) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1316.534946] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28d4f22-96af-4238-8532-6268713cd510 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.542331] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57122571-7f9d-4dab-abb0-f6b3cdc0b1f8 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.571885] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c391308-383e-4940-9a2b-29387881b2e6 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.578809] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59f2010-3a53-4123-8edf-2ccd25621828 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.592461] env[61905]: DEBUG nova.compute.provider_tree [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.095971] env[61905]: DEBUG nova.scheduler.client.report [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1317.601186] env[61905]: DEBUG nova.compute.resource_tracker [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61905) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1317.601555] env[61905]: DEBUG oslo_concurrency.lockutils [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.134s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.848053] env[61905]: DEBUG oslo_concurrency.lockutils [None 
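[editor's note] The "Final resource view" line is straight arithmetic over the three per-instance allocations listed just above it plus the reserved memory from the inventory: 512 MB reserved + 192 + 192 + 256 = 1152 MB used_ram, one VCPU each = 3 used_vcpus, and the two 1 GB DISK_GB allocations = 2 GB used_disk. The same sums, checked in a few lines:

    RESERVED_MB = 512   # 'reserved' in the MEMORY_MB inventory reported above

    allocations = [     # per-instance placement allocations from the audit above
        {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # bcca8c7b-...
        {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # 821aa0cb-...
        {"MEMORY_MB": 256, "VCPU": 1},                 # cf487ed2-... (no DISK_GB in its allocation)
    ]

    used_ram = RESERVED_MB + sum(a["MEMORY_MB"] for a in allocations)
    used_vcpus = sum(a["VCPU"] for a in allocations)
    used_disk = sum(a.get("DISK_GB", 0) for a in allocations)
    print(used_ram, used_vcpus, used_disk)
    # 1152 3 2 -- matching used_ram=1152MB, used_vcpus=3, used_disk=2GB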
[ 1330.848053] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1330.848053] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1330.848532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1330.848532] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1330.850674] env[61905]: INFO nova.compute.manager [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Terminating instance
[ 1330.852554] env[61905]: DEBUG nova.compute.manager [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}}
[ 1330.852767] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}}
[ 1330.853008] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef858b9c-8d60-463b-b521-4f21018286fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1330.859960] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1330.859960] env[61905]: value = "task-1363091"
[ 1330.859960] env[61905]: _type = "Task"
[ 1330.859960] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1330.867553] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363091, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1331.369827] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363091, 'name': PowerOffVM_Task, 'duration_secs': 0.190774} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1331.370080] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1331.370330] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Volume detach. Driver type: vmdk {{(pid=61905) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}}
[ 1331.370526] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290125', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'name': 'volume-bed84cb4-b376-46a7-9c2f-9699deeee48e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'cf487ed2-ae22-4867-8987-86480ac8e07a', 'attached_at': '2024-10-25T05:26:42.000000', 'detached_at': '', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'serial': 'bed84cb4-b376-46a7-9c2f-9699deeee48e'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}}
[ 1331.371313] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdb523b-f1ee-4e5d-8f72-68ad43481e6f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.390119] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae177710-5a16-4370-9b98-2cc0b3fce0f9 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.396246] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e846cb0-fc64-4904-9192-2a8fd25b3b53 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.412991] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfe9c7c-ee5a-4f71-a36a-bdfb20540c98 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.426825] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] The volume has not been displaced from its original location: [datastore1] volume-bed84cb4-b376-46a7-9c2f-9699deeee48e/volume-bed84cb4-b376-46a7-9c2f-9699deeee48e.vmdk. No consolidation needed. {{(pid=61905) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}}
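[editor's note] The _detach_volume_vmdk dump above is the Cinder connection_info for the attachment: driver_volume_type selects the detach path, and data carries the backing identifiers (vm-290125 is the volume's backing reference, volume_id the Cinder volume). A sketch of dispatching on that structure (the handler bodies are stubs, not the driver's real methods):

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-290125",
            "volume_id": "bed84cb4-b376-46a7-9c2f-9699deeee48e",
            "access_mode": "rw",
            "encrypted": False,
        },
    }

    def detach_volume(connection_info):
        # Key off driver_volume_type to pick a detach handler; a 'vmdk'
        # volume is removed from the VM via a ReconfigVM_Task, as the
        # surrounding log entries show.
        handlers = {
            "vmdk": lambda d: print("detach vmdk volume", d["volume_id"]),
            "iscsi": lambda d: print("detach iscsi volume", d["volume_id"]),
        }
        try:
            handler = handlers[connection_info["driver_volume_type"]]
        except KeyError:
            raise ValueError("unsupported volume type")
        handler(connection_info["data"])

    detach_volume(connection_info)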
[ 1331.432063] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}}
[ 1331.432318] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad0653e0-39e2-47a1-8a1e-7d92793ac460 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.448693] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1331.448693] env[61905]: value = "task-1363092"
[ 1331.448693] env[61905]: _type = "Task"
[ 1331.448693] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1331.455601] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363092, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1331.958610] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363092, 'name': ReconfigVM_Task, 'duration_secs': 0.141078} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1331.959074] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=61905) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}}
[ 1331.963321] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99d59a11-49ca-45c8-a991-4cd7940c6db3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.978231] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1331.978231] env[61905]: value = "task-1363093"
[ 1331.978231] env[61905]: _type = "Task"
[ 1331.978231] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1331.985454] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1332.488300] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363093, 'name': ReconfigVM_Task, 'duration_secs': 0.114916} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1332.488624] env[61905]: DEBUG nova.virt.vmwareapi.volumeops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290125', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'name': 'volume-bed84cb4-b376-46a7-9c2f-9699deeee48e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'cf487ed2-ae22-4867-8987-86480ac8e07a', 'attached_at': '2024-10-25T05:26:42.000000', 'detached_at': '', 'volume_id': 'bed84cb4-b376-46a7-9c2f-9699deeee48e', 'serial': 'bed84cb4-b376-46a7-9c2f-9699deeee48e'} {{(pid=61905) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}}
[ 1332.488902] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 1332.489667] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02050651-ffa4-492f-9e43-d74011071030 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.496126] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1332.496349] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32be27b8-2183-4206-b2a0-936abbeace6f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.555465] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1332.555678] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Deleting contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1332.555838] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore1] cf487ed2-ae22-4867-8987-86480ac8e07a {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1332.556097] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a8151f2-b5bb-4fe3-b682-37249a1425a1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.561913] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){
[ 1332.561913] env[61905]: value = "task-1363095"
[ 1332.561913] env[61905]: _type = "Task"
[ 1332.561913] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1332.568731] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1333.072607] env[61905]: DEBUG oslo_vmware.api [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072495} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1333.072984] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1333.073073] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Deleted contents of the VM from datastore datastore1 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1333.073215] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1333.073396] env[61905]: INFO nova.compute.manager [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Took 2.22 seconds to destroy the instance on the hypervisor.
[ 1333.073649] env[61905]: DEBUG oslo.service.loopingcall [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1333.073871] env[61905]: DEBUG nova.compute.manager [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}}
[ 1333.073972] env[61905]: DEBUG nova.network.neutron [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1333.481991] env[61905]: DEBUG nova.compute.manager [req-57cec6be-d2f9-4ac8-89ac-8b374e27ab5c req-e840b229-74cf-4a7e-be1a-1de91998bc6a service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Received event network-vif-deleted-3c103fdf-7451-4fc9-9e07-99c09eaf4765 {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}}
[ 1333.482933] env[61905]: INFO nova.compute.manager [req-57cec6be-d2f9-4ac8-89ac-8b374e27ab5c req-e840b229-74cf-4a7e-be1a-1de91998bc6a service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Neutron deleted interface 3c103fdf-7451-4fc9-9e07-99c09eaf4765; detaching it from the instance and deleting it from the info cache
[ 1333.482933] env[61905]: DEBUG nova.network.neutron [req-57cec6be-d2f9-4ac8-89ac-8b374e27ab5c req-e840b229-74cf-4a7e-be1a-1de91998bc6a service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1333.961998] env[61905]: DEBUG nova.network.neutron [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1333.985176] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26b8b91a-341f-453a-9cf1-0b3b0a54c445 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1333.996813] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421fc3e9-6da1-4a17-9eff-554dff2e6865 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1334.019640] env[61905]: DEBUG nova.compute.manager [req-57cec6be-d2f9-4ac8-89ac-8b374e27ab5c req-e840b229-74cf-4a7e-be1a-1de91998bc6a service nova] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Detach interface failed, port_id=3c103fdf-7451-4fc9-9e07-99c09eaf4765, reason: Instance cf487ed2-ae22-4867-8987-86480ac8e07a could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}}
[ 1334.463620] env[61905]: INFO nova.compute.manager [-] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Took 1.39 seconds to deallocate network for instance.
[ 1335.007703] env[61905]: INFO nova.compute.manager [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Took 0.54 seconds to detach 1 volumes for instance.
[ 1335.009934] env[61905]: DEBUG nova.compute.manager [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: cf487ed2-ae22-4867-8987-86480ac8e07a] Deleting volume: bed84cb4-b376-46a7-9c2f-9699deeee48e {{(pid=61905) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1335.549084] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.549518] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.549663] env[61905]: DEBUG nova.objects.instance [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'resources' on Instance uuid cf487ed2-ae22-4867-8987-86480ac8e07a {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1336.108409] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98772af-b91e-4ed4-b412-131e8c48e643 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.116206] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80a9f64-6352-4fd0-9686-9b7fceb3d972 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.148221] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7c3212-6039-49bb-9800-c3d1396100a2 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.155424] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d654bd46-b4d2-4500-8b00-b8919f8903ce {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.168171] env[61905]: DEBUG nova.compute.provider_tree [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.671865] env[61905]: DEBUG nova.scheduler.client.report [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.177654] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.195032] env[61905]: INFO nova.scheduler.client.report [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocations for instance cf487ed2-ae22-4867-8987-86480ac8e07a [ 1337.702115] env[61905]: DEBUG oslo_concurrency.lockutils [None req-bae883a6-6710-4048-8dc4-6bde708cfa3a tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "cf487ed2-ae22-4867-8987-86480ac8e07a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.854s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.769423] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.769780] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.770078] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.770385] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.770658] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock 
"821aa0cb-1947-46fe-bc0a-4900baa8cf82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.773362] env[61905]: INFO nova.compute.manager [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Terminating instance [ 1338.775657] env[61905]: DEBUG nova.compute.manager [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1338.775935] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1338.777134] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd173f8-476f-479a-9c1e-b900bde8cfdc {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.787329] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1338.787651] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d5883b1-4f68-49d3-855f-9da5aaadc8fb {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.795973] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1338.795973] env[61905]: value = "task-1363097" [ 1338.795973] env[61905]: _type = "Task" [ 1338.795973] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.805841] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363097, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.306673] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363097, 'name': PowerOffVM_Task, 'duration_secs': 0.181927} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.306944] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1339.307137] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1339.307391] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6752da4-a8ed-4911-872d-6f03010f0327 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.366438] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1339.366702] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1339.366849] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore2] 821aa0cb-1947-46fe-bc0a-4900baa8cf82 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1339.367173] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd43125a-b661-473a-9462-303b93133883 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.373346] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1339.373346] env[61905]: value = "task-1363099" [ 1339.373346] env[61905]: _type = "Task" [ 1339.373346] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.380632] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.883124] env[61905]: DEBUG oslo_vmware.api [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125735} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.883477] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1339.883615] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1339.883720] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1339.883924] env[61905]: INFO nova.compute.manager [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1339.884181] env[61905]: DEBUG oslo.service.loopingcall [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1339.884374] env[61905]: DEBUG nova.compute.manager [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1339.884468] env[61905]: DEBUG nova.network.neutron [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1340.113615] env[61905]: DEBUG nova.compute.manager [req-1b189c4c-c7bf-478a-9105-7cdf32b8bcb9 req-810dbf75-ca5a-47f6-a622-a564b6a67a09 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Received event network-vif-deleted-5121d3e3-74ae-4238-b6a6-5f789158dbeb {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1340.113782] env[61905]: INFO nova.compute.manager [req-1b189c4c-c7bf-478a-9105-7cdf32b8bcb9 req-810dbf75-ca5a-47f6-a622-a564b6a67a09 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Neutron deleted interface 5121d3e3-74ae-4238-b6a6-5f789158dbeb; detaching it from the instance and deleting it from the info cache [ 1340.113996] env[61905]: DEBUG nova.network.neutron [req-1b189c4c-c7bf-478a-9105-7cdf32b8bcb9 req-810dbf75-ca5a-47f6-a622-a564b6a67a09 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.586667] env[61905]: DEBUG nova.network.neutron [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.616342] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-636da729-f619-49db-b72d-69ed0986dc03 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.628860] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6a22af-6389-4f8c-9e9f-5cba159080a3 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.661311] env[61905]: DEBUG nova.compute.manager [req-1b189c4c-c7bf-478a-9105-7cdf32b8bcb9 req-810dbf75-ca5a-47f6-a622-a564b6a67a09 service nova] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Detach interface failed, port_id=5121d3e3-74ae-4238-b6a6-5f789158dbeb, reason: Instance 821aa0cb-1947-46fe-bc0a-4900baa8cf82 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1341.089895] env[61905]: INFO nova.compute.manager [-] [instance: 821aa0cb-1947-46fe-bc0a-4900baa8cf82] Took 1.21 seconds to deallocate network for instance. 
[ 1341.596047] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.596333] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.596553] env[61905]: DEBUG nova.objects.instance [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'resources' on Instance uuid 821aa0cb-1947-46fe-bc0a-4900baa8cf82 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1342.138312] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db01456c-a713-4554-8275-72654f521b8b {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.145615] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ace67e-e1a1-439f-bf53-49d55c306a26 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.175403] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cae0a0d-924d-429b-8698-980191daf8f4 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.182622] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b230fe4-fb28-4553-97da-9191326c7c74 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.195212] env[61905]: DEBUG nova.compute.provider_tree [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.698614] env[61905]: DEBUG nova.scheduler.client.report [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1343.204020] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.222712] env[61905]: INFO nova.scheduler.client.report [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocations for instance 821aa0cb-1947-46fe-bc0a-4900baa8cf82 [ 1343.731810] env[61905]: DEBUG oslo_concurrency.lockutils [None req-ccf17f14-7971-49c9-91cd-93b2dcb10448 tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "821aa0cb-1947-46fe-bc0a-4900baa8cf82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.962s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.369689] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.370174] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.370259] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.370399] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.370573] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.372861] env[61905]: INFO nova.compute.manager [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 
tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Terminating instance [ 1347.374663] env[61905]: DEBUG nova.compute.manager [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Start destroying the instance on the hypervisor. {{(pid=61905) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1347.374857] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Destroying instance {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1347.375722] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5563e459-d089-440e-a2da-e87494bfef84 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.383297] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Powering off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1347.383520] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3321642-7571-4231-be88-a7bb0dc4cee5 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.390344] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1347.390344] env[61905]: value = "task-1363101" [ 1347.390344] env[61905]: _type = "Task" [ 1347.390344] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.397720] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.900192] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363101, 'name': PowerOffVM_Task, 'duration_secs': 0.186029} completed successfully. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.900466] env[61905]: DEBUG nova.virt.vmwareapi.vm_util [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Powered off the VM {{(pid=61905) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1347.900637] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Unregistering the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1347.900878] env[61905]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-565ee78b-d31f-4318-9a5f-8f2054f86637 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.962573] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Unregistered the VM {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1347.962773] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Deleting contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1347.962960] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleting the datastore file [datastore2] bcca8c7b-3e80-4895-ac56-d5aa05d482e5 {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1347.963247] env[61905]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa3b4730-f5f6-49b7-86a3-a176dce4ad5f {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.969297] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for the task: (returnval){ [ 1347.969297] env[61905]: value = "task-1363103" [ 1347.969297] env[61905]: _type = "Task" [ 1347.969297] env[61905]: } to complete. {{(pid=61905) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.976612] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363103, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.479393] env[61905]: DEBUG oslo_vmware.api [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Task: {'id': task-1363103, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127885} completed successfully. {{(pid=61905) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.479754] env[61905]: DEBUG nova.virt.vmwareapi.ds_util [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted the datastore file {{(pid=61905) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1348.479809] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Deleted contents of the VM from datastore datastore2 {{(pid=61905) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1348.479983] env[61905]: DEBUG nova.virt.vmwareapi.vmops [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Instance destroyed {{(pid=61905) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1348.480180] env[61905]: INFO nova.compute.manager [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1348.480419] env[61905]: DEBUG oslo.service.loopingcall [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61905) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1348.480610] env[61905]: DEBUG nova.compute.manager [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Deallocating network for instance {{(pid=61905) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1348.480703] env[61905]: DEBUG nova.network.neutron [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] deallocate_for_instance() {{(pid=61905) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1348.716030] env[61905]: DEBUG nova.compute.manager [req-446a27c8-6c43-44f8-aeb8-abddcca62463 req-3e7172e3-b07b-4548-ae39-d2386cea1cec service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Received event network-vif-deleted-a105c9d5-8ba9-40c5-ba4c-a35528f5779b {{(pid=61905) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1348.716273] env[61905]: INFO nova.compute.manager [req-446a27c8-6c43-44f8-aeb8-abddcca62463 req-3e7172e3-b07b-4548-ae39-d2386cea1cec service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Neutron deleted interface a105c9d5-8ba9-40c5-ba4c-a35528f5779b; detaching it from the instance and deleting it from the info cache [ 1348.716422] env[61905]: DEBUG nova.network.neutron [req-446a27c8-6c43-44f8-aeb8-abddcca62463 req-3e7172e3-b07b-4548-ae39-d2386cea1cec service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.194309] env[61905]: DEBUG nova.network.neutron [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Updating instance_info_cache with network_info: [] {{(pid=61905) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.218545] env[61905]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7dfcd28f-380a-408e-8cc0-e121f6e7d061 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.228561] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcdde59-6df6-4165-8562-c8aed30bf65d {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.249421] env[61905]: DEBUG nova.compute.manager [req-446a27c8-6c43-44f8-aeb8-abddcca62463 req-3e7172e3-b07b-4548-ae39-d2386cea1cec service nova] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Detach interface failed, port_id=a105c9d5-8ba9-40c5-ba4c-a35528f5779b, reason: Instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 could not be found. {{(pid=61905) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1349.696984] env[61905]: INFO nova.compute.manager [-] [instance: bcca8c7b-3e80-4895-ac56-d5aa05d482e5] Took 1.22 seconds to deallocate network for instance. 
[ 1350.204901] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.205200] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.205432] env[61905]: DEBUG nova.objects.instance [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lazy-loading 'resources' on Instance uuid bcca8c7b-3e80-4895-ac56-d5aa05d482e5 {{(pid=61905) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1350.738205] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49923de-d8cc-4264-9ce0-bb186a6f82ed {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.745894] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47734b9-1ab1-478f-a55b-e5f24ad482ec {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.775337] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4368f333-80aa-4d35-98ac-185bf5b8cb14 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.782915] env[61905]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5909e034-5359-47ff-93d2-2a3ac9540fd1 {{(pid=61905) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.795868] env[61905]: DEBUG nova.compute.provider_tree [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed in ProviderTree for provider: 9cb855ec-212a-457a-a4ff-55e9d97323b7 {{(pid=61905) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.299429] env[61905]: DEBUG nova.scheduler.client.report [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Inventory has not changed for provider 9cb855ec-212a-457a-a4ff-55e9d97323b7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61905) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1351.804302] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd 
tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.599s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.823065] env[61905]: INFO nova.scheduler.client.report [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Deleted allocations for instance bcca8c7b-3e80-4895-ac56-d5aa05d482e5 [ 1352.331578] env[61905]: DEBUG oslo_concurrency.lockutils [None req-76ad0ae9-31d0-4929-a426-8a7aaa156fdd tempest-ServerActionsTestOtherA-1240348434 tempest-ServerActionsTestOtherA-1240348434-project-member] Lock "bcca8c7b-3e80-4895-ac56-d5aa05d482e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.961s {{(pid=61905) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.098650] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.099186] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.405036] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.405202] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61905) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1370.404554] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.404930] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1370.404930] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Cleaning up deleted instances with incomplete migration {{(pid=61905) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1371.907550] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.907907] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Starting heal instance info cache {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1372.913825] env[61905]: DEBUG nova.compute.manager [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Didn't find any instances for network info cache update. {{(pid=61905) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1372.914227] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.405806] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1373.405806] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1376.405065] env[61905]: DEBUG oslo_service.periodic_task [None req-1541d6d8-0ea9-456a-9094-4f2b7ee3ca2e None None] Running periodic task ComputeManager.update_available_resource {{(pid=61905) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}